id
int64 0
10.2k
| text_id
stringlengths 17
67
| repo_owner
stringclasses 232
values | repo_name
stringclasses 295
values | issue_url
stringlengths 39
89
| pull_url
stringlengths 37
87
| comment_url
stringlengths 37
94
| links_count
int64 1
2
| link_keyword
stringclasses 12
values | issue_title
stringlengths 7
197
| issue_body
stringlengths 45
21.3k
| base_sha
stringlengths 40
40
| head_sha
stringlengths 40
40
| diff_url
stringlengths 120
170
| diff
stringlengths 478
132k
| changed_files
stringlengths 47
2.6k
| changed_files_exts
stringclasses 22
values | changed_files_count
int64 1
22
| java_changed_files_count
int64 1
22
| kt_changed_files_count
int64 0
0
| py_changed_files_count
int64 0
0
| code_changed_files_count
int64 1
22
| repo_symbols_count
int64 32.6k
242M
| repo_tokens_count
int64 6.59k
49.2M
| repo_lines_count
int64 992
6.2M
| repo_files_without_tests_count
int64 12
28.1k
| changed_symbols_count
int64 0
36.1k
| changed_tokens_count
int64 0
6.5k
| changed_lines_count
int64 0
561
| changed_files_without_tests_count
int64 1
17
| issue_symbols_count
int64 45
21.3k
| issue_words_count
int64 2
1.39k
| issue_tokens_count
int64 13
4.47k
| issue_lines_count
int64 1
325
| issue_links_count
int64 0
19
| issue_code_blocks_count
int64 0
31
| pull_create_at
timestamp[s] | repo_stars
int64 10
44.3k
| repo_language
stringclasses 8
values | repo_languages
stringclasses 296
values | repo_license
stringclasses 2
values |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1,147 | wso2/product-microgateway/1546/1545 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/1545 | https://github.com/wso2/product-microgateway/pull/1546 | https://github.com/wso2/product-microgateway/pull/1546 | 1 | fix | Error reading event hub config | ### Description:
Even the event hub config is enabled the config is not getting effected
### Steps to reproduce:
enable event hub config in the config.toml
### Affected Product Version:
<!-- Members can use Affected/*** labels -->
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 737f4139548a7fa4a6062c68bd0868df7e8b8445 | e7c6696f71485de5f284358465fd005f413bad77 | https://github.com/wso2/product-microgateway/compare/737f4139548a7fa4a6062c68bd0868df7e8b8445...e7c6696f71485de5f284358465fd005f413bad77 | diff --git a/enforcer/src/main/java/org/wso2/micro/gateway/enforcer/config/dto/EventHubConfigurationDto.java b/enforcer/src/main/java/org/wso2/micro/gateway/enforcer/config/dto/EventHubConfigurationDto.java
index 4d07919ff..7c0b188cd 100644
--- a/enforcer/src/main/java/org/wso2/micro/gateway/enforcer/config/dto/EventHubConfigurationDto.java
+++ b/enforcer/src/main/java/org/wso2/micro/gateway/enforcer/config/dto/EventHubConfigurationDto.java
@@ -24,17 +24,17 @@ import java.util.Properties;
* Holds the configurations related to connecting with APIM event hub node.
*/
public class EventHubConfigurationDto {
- private boolean enabled;
+ private boolean enable;
private String serviceUrl;
private int initDelay = 0;
private Properties jmsConnectionParameters = new Properties();
public boolean isEnabled() {
- return enabled;
+ return enable;
}
- public void setEnabled(boolean enabled) {
- this.enabled = enabled;
+ public void setEnable(boolean enable) {
+ this.enable = enable;
}
public String getServiceUrl() { | ['enforcer/src/main/java/org/wso2/micro/gateway/enforcer/config/dto/EventHubConfigurationDto.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 374,160 | 76,210 | 10,897 | 92 | 263 | 48 | 8 | 1 | 564 | 81 | 119 | 25 | 0 | 0 | 1970-01-01T00:26:50 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,138 | wso2/product-microgateway/1969/1963 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/1963 | https://github.com/wso2/product-microgateway/pull/1969 | https://github.com/wso2/product-microgateway/pull/1969 | 1 | fix | Issue in non secured blocked API's resource | ### Description:
<!-- Describe the issue -->
When turned off the security for the resource once, blocked state not applied to the resource.
### Steps to reproduce:
create API, deploy, publish, invoke
block API, invoke,-> fails as expected.
remove security for the resource. invoke -> successful invoke, this is erroneous.
try enable security, publish API again and block again. -> nothing blocks the API now.
### Affected Product Version:
<!-- Members can use Affected/*** labels -->
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 92d236d2342d051b52615a9b5cd2c54c65e4b10f | f06084abddf40c5d5199c783f684e33cd4b6ee71 | https://github.com/wso2/product-microgateway/compare/92d236d2342d051b52615a9b5cd2c54c65e4b10f...f06084abddf40c5d5199c783f684e33cd4b6ee71 | diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/UnsecuredAPIAuthenticator.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/UnsecuredAPIAuthenticator.java
index 40c801c78..abc10eb9e 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/UnsecuredAPIAuthenticator.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/UnsecuredAPIAuthenticator.java
@@ -20,9 +20,14 @@ package org.wso2.choreo.connect.enforcer.security.jwt;
import org.wso2.choreo.connect.enforcer.api.RequestContext;
import org.wso2.choreo.connect.enforcer.api.config.ResourceConfig;
+import org.wso2.choreo.connect.enforcer.constants.APIConstants;
+import org.wso2.choreo.connect.enforcer.constants.GeneralErrorCodeConstants;
import org.wso2.choreo.connect.enforcer.exception.APISecurityException;
+import org.wso2.choreo.connect.enforcer.models.API;
import org.wso2.choreo.connect.enforcer.security.AuthenticationContext;
import org.wso2.choreo.connect.enforcer.security.Authenticator;
+import org.wso2.choreo.connect.enforcer.subscription.SubscriptionDataHolder;
+import org.wso2.choreo.connect.enforcer.subscription.SubscriptionDataStore;
import org.wso2.choreo.connect.enforcer.util.FilterUtils;
/**
@@ -31,7 +36,8 @@ import org.wso2.choreo.connect.enforcer.util.FilterUtils;
public class UnsecuredAPIAuthenticator implements Authenticator {
- @Override public boolean canAuthenticate(RequestContext requestContext) {
+ @Override
+ public boolean canAuthenticate(RequestContext requestContext) {
// Retrieve the disable security value. If security is disabled, then you can proceed directly with the
// authentication.
if (isDisableSecurity(requestContext.getMatchedResourcePath())) {
@@ -40,7 +46,22 @@ public class UnsecuredAPIAuthenticator implements Authenticator {
return false;
}
- @Override public AuthenticationContext authenticate(RequestContext requestContext) throws APISecurityException {
+ @Override
+ public AuthenticationContext authenticate(RequestContext requestContext) throws APISecurityException {
+ String uuid = requestContext.getMatchedAPI().getAPIConfig().getUuid();
+ String context = requestContext.getMatchedAPI().getAPIConfig().getBasePath();
+ String apiTenantDomain = FilterUtils.getTenantDomainFromRequestURL(context);
+ SubscriptionDataStore datastore = SubscriptionDataHolder.getInstance()
+ .getTenantSubscriptionStore(apiTenantDomain);
+ API api = datastore.getApiByContextAndVersion(uuid);
+ if (api != null && APIConstants.LifecycleStatus.BLOCKED.equals(api.getLcState())) {
+ requestContext.getProperties()
+ .put(APIConstants.MessageFormat.ERROR_MESSAGE, GeneralErrorCodeConstants.API_BLOCKED_MESSAGE);
+ requestContext.getProperties().put(APIConstants.MessageFormat.ERROR_DESCRIPTION,
+ GeneralErrorCodeConstants.API_BLOCKED_DESCRIPTION);
+ throw new APISecurityException(APIConstants.StatusCodes.SERVICE_UNAVAILABLE.getCode(),
+ GeneralErrorCodeConstants.API_BLOCKED_CODE, GeneralErrorCodeConstants.API_BLOCKED_MESSAGE);
+ }
return FilterUtils.generateAuthenticationContext(requestContext);
}
| ['enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/UnsecuredAPIAuthenticator.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,442,320 | 753,617 | 96,080 | 373 | 1,856 | 336 | 25 | 1 | 823 | 120 | 171 | 29 | 0 | 0 | 1970-01-01T00:26:59 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,128 | wso2/product-microgateway/2346/2345 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2345 | https://github.com/wso2/product-microgateway/pull/2346 | https://github.com/wso2/product-microgateway/pull/2346 | 1 | fixes | Cannot invoke non secured resource | ### Description:
<!-- Describe the issue -->
gets 500 error with following log
```
java.lang.NullPointerException
choreo-connect-with-apim-enforcer-1 | at com.google.protobuf.Value$Builder.setStringValue(Value.java:1063)
choreo-connect-with-apim-enforcer-1 | at org.wso2.choreo.connect.enforcer.grpc.ExtAuthService.lambda$buildResponse$3(ExtAuthService.java:155)
choreo-connect-with-apim-enforcer-1 | at java.base/java.util.HashMap.forEach(Unknown Source)
choreo-connect-with-apim-enforcer-1 | at org.wso2.choreo.connect.enforcer.grpc.ExtAuthService.buildResponse(ExtAuthService.java:154)
choreo-connect-with-apim-enforcer-1 | at org.wso2.choreo.connect.enforcer.grpc.ExtAuthService.check(ExtAuthService.java:74)
choreo-connect-with-apim-enforcer-1 | at io.envoyproxy.envoy.service.auth.v3.AuthorizationGrpc$MethodHandlers.invoke(AuthorizationGrpc.java:245)
choreo-connect-with-apim-enforcer-1 | at io.grpc.stub.ServerCalls$UnaryServerCallHandler$UnaryServerCallListener.onHalfClose(ServerCalls.java:180)
choreo-connect-with-apim-enforcer-1 | at io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.halfClosed(ServerCallImpl.java:331)
choreo-connect-with-apim-enforcer-1 | at io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed.runInContext(ServerImpl.java:814)
choreo-connect-with-apim-enforcer-1 | at io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
choreo-connect-with-apim-enforcer-1 | at io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
choreo-connect-with-apim-enforcer-1 | at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
choreo-connect-with-apim-enforcer-1 | at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
choreo-connect-with-apim-enforcer-1 | at java.base/java.lang.Thread.run(Unknown Source)
```
### Steps to reproduce:
create an api
disable security on a resource
invoke the resource
### analyse
this is due to new metadata added by https://github.com/wso2/product-microgateway/pull/2330/ and they are being null for non secured resource
raw token is null when resource is unsecured
https://github.com/wso2/product-microgateway/blob/d66797c2a2b056b594cb1b4708820c80d504f635/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java#L273
### Affected Product Version:
<!-- Members can use Affected/*** labels -->
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| d66797c2a2b056b594cb1b4708820c80d504f635 | b78a4f7c6ad762d01dc5d9edc9198c352f60cb01 | https://github.com/wso2/product-microgateway/compare/d66797c2a2b056b594cb1b4708820c80d504f635...b78a4f7c6ad762d01dc5d9edc9198c352f60cb01 | diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
index a2ce09436..c6b8813bb 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
@@ -43,6 +43,7 @@ import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
/**
* This is the filter handling the authentication for the requests flowing through the gateway.
@@ -129,7 +130,7 @@ public class AuthFilter implements Filter {
canAuthenticated = true;
AuthenticationResponse authenticateResponse = authenticate(authenticator, requestContext);
if (authenticateResponse.isAuthenticated() && !authenticateResponse.isContinueToNextAuthenticator()) {
- setInterceptorAuthContext(authenticator, requestContext);
+ setInterceptorAuthContextMetadata(authenticator, requestContext);
return true;
}
}
@@ -266,11 +267,14 @@ public class AuthFilter implements Filter {
StringUtils.join(retryConfig.getStatusCodes(), ","));
}
- private void setInterceptorAuthContext(Authenticator authenticator, RequestContext requestContext) {
+ private void setInterceptorAuthContextMetadata(Authenticator authenticator, RequestContext requestContext) {
// add auth context to metadata, lua script will add it to the auth context of the interceptor
AuthenticationContext authContext = requestContext.getAuthenticationContext();
- requestContext.addMetadataToMap(InterceptorConstants.AuthContextFields.TOKEN_TYPE, authenticator.getName());
- requestContext.addMetadataToMap(InterceptorConstants.AuthContextFields.TOKEN, authContext.getRawToken());
- requestContext.addMetadataToMap(InterceptorConstants.AuthContextFields.KEY_TYPE, authContext.getKeyType());
+ requestContext.addMetadataToMap(InterceptorConstants.AuthContextFields.TOKEN_TYPE,
+ Objects.toString(authenticator.getName(), ""));
+ requestContext.addMetadataToMap(InterceptorConstants.AuthContextFields.TOKEN,
+ Objects.toString(authContext.getRawToken(), ""));
+ requestContext.addMetadataToMap(InterceptorConstants.AuthContextFields.KEY_TYPE,
+ Objects.toString(authContext.getKeyType(), ""));
}
} | ['enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,944,300 | 859,918 | 109,807 | 449 | 1,229 | 203 | 14 | 1 | 2,783 | 175 | 702 | 50 | 2 | 1 | 1970-01-01T00:27:15 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,132 | wso2/product-microgateway/2142/2081 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2081 | https://github.com/wso2/product-microgateway/pull/2142 | https://github.com/wso2/product-microgateway/pull/2142 | 1 | fix | Internal key caching issue | ### Description:
<!-- Describe the issue -->
### Steps to reproduce:
use a valid internal key and invoke a created api
then tamper with the key (delete some characters) and use it to invoke the same api. -> it works (but this should not work)
### Affected Product Version:
<!-- Members can use Affected/*** labels -->
beta
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| bc0220ca5f89f04a9d7ae6e9f7707eb3fa2d3975 | 695b16ebc6700ff5a11ac67270ca0533021e62bb | https://github.com/wso2/product-microgateway/compare/bc0220ca5f89f04a9d7ae6e9f7707eb3fa2d3975...695b16ebc6700ff5a11ac67270ca0533021e62bb | diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/JWTTokenPayloadInfo.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/JWTTokenPayloadInfo.java
index f07b0096e..d60cbaea3 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/JWTTokenPayloadInfo.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/JWTTokenPayloadInfo.java
@@ -28,7 +28,7 @@ public class JWTTokenPayloadInfo implements Serializable {
JWTClaimsSet payload;
- String rawPayload;
+ String accessToken;
public JWTClaimsSet getPayload() {
return payload;
@@ -38,11 +38,11 @@ public class JWTTokenPayloadInfo implements Serializable {
this.payload = payload;
}
- public String getRawPayload() {
- return rawPayload;
+ public String getAccessToken() {
+ return accessToken;
}
- public void setRawPayload(String rawPayload) {
- this.rawPayload = rawPayload;
+ public void setAccessToken(String accessToken) {
+ this.accessToken = accessToken;
}
}
diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
index 69c91686b..f3201d068 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
@@ -122,9 +122,11 @@ public class InternalAPIKeyAuthenticator implements Authenticator {
JWTTokenPayloadInfo jwtTokenPayloadInfo = (JWTTokenPayloadInfo)
CacheProvider.getGatewayInternalKeyDataCache().getIfPresent(tokenIdentifier);
if (jwtTokenPayloadInfo != null) {
- String rawPayload = jwtTokenPayloadInfo.getRawPayload();
- isVerified = rawPayload.equals(splitToken[1]) && !isJwtTokenExpired(payload);
- } else if (CacheProvider.getInvalidGatewayInternalKeyCache().getIfPresent(tokenIdentifier) != null) {
+ String cachedToken = jwtTokenPayloadInfo.getAccessToken();
+ isVerified = cachedToken.equals(internalKey) && !isJwtTokenExpired(payload);
+ } else if (CacheProvider.getInvalidGatewayInternalKeyCache().getIfPresent(tokenIdentifier) != null
+ && internalKey
+ .equals(CacheProvider.getInvalidGatewayInternalKeyCache().getIfPresent(tokenIdentifier))) {
if (log.isDebugEnabled()) {
log.debug("Internal Key retrieved from the invalid internal Key cache. Internal Key: "
+ FilterUtils.getMaskedToken(splitToken[0]));
@@ -168,7 +170,7 @@ public class InternalAPIKeyAuthenticator implements Authenticator {
}
jwtTokenPayloadInfo = new JWTTokenPayloadInfo();
jwtTokenPayloadInfo.setPayload(payload);
- jwtTokenPayloadInfo.setRawPayload(splitToken[1]);
+ jwtTokenPayloadInfo.setAccessToken(internalKey);
CacheProvider.getGatewayInternalKeyDataCache().put(tokenIdentifier, jwtTokenPayloadInfo);
}
@@ -182,7 +184,7 @@ public class InternalAPIKeyAuthenticator implements Authenticator {
requestContext.getMatchedAPI().getAPIConfig().getUuid());
} else {
CacheProvider.getGatewayInternalKeyDataCache().invalidate(payload.getJWTID());
- CacheProvider.getInvalidGatewayInternalKeyCache().put(payload.getJWTID(), "carbon.super");
+ CacheProvider.getInvalidGatewayInternalKeyCache().put(payload.getJWTID(), internalKey);
throw new APISecurityException(APIConstants.StatusCodes.UNAUTHENTICATED.getCode(),
APISecurityConstants.API_AUTH_INVALID_CREDENTIALS,
APISecurityConstants.API_AUTH_INVALID_CREDENTIALS_MESSAGE);
diff --git a/integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/standalone/jwtValidator/InternalKeyTestCase.java b/integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/standalone/jwtValidator/InternalKeyTestCase.java
index b1700234c..bba6faf68 100644
--- a/integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/standalone/jwtValidator/InternalKeyTestCase.java
+++ b/integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/standalone/jwtValidator/InternalKeyTestCase.java
@@ -32,13 +32,30 @@ import java.util.Map;
public class InternalKeyTestCase {
protected String internalKey;
+ protected String tamperedInternalKey;
@BeforeClass(description = "initialise the setup")
void start() throws Exception {
internalKey = TokenUtil.getJwtForPetstore(TestConstant.KEY_TYPE_PRODUCTION, null, true);
+ tamperedInternalKey = internalKey.substring(0, internalKey.length()-4);
}
+ // First invoke with tampered internal key. This should fail.
@Test(description = "Test to check the InternalKey is working")
+ public void invokeWithTamperedInternalKey() throws Exception {
+ // Set header
+ Map<String, String> headers = new HashMap<>();
+ headers.put("Internal-Key", tamperedInternalKey);
+ HttpResponse response = HttpsClientRequest.doGet(Utils.getServiceURLHttps("/v2/pet/2") , headers);
+
+ Assert.assertNotNull(response);
+ Assert.assertEquals(response.getResponseCode(), HttpStatus.SC_UNAUTHORIZED,"Response code mismatched");
+ Assert.assertTrue(response.getData().contains("Invalid Credentials"), "Error response message mismatch");
+ }
+
+ // When invoke with original token even though the tampered key is in the invalid key cache,
+ // original token should pass.
+ @Test(description = "Test to check the InternalKey is working", dependsOnMethods = "invokeWithTamperedInternalKey")
public void invokeInternalKeyHeaderSuccessTest() throws Exception {
// Set header
Map<String, String> headers = new HashMap<>();
@@ -61,6 +78,19 @@ public class InternalKeyTestCase {
Assert.assertTrue(response.getData().contains("Invalid Credentials"), "Error response message mismatch");
}
+ // After invoking with original key, it is cacahed as a success token. But again using the tampered key should fail.
+ @Test(description = "Test to check the InternalKey is working", dependsOnMethods = "invokeInternalKeyHeaderSuccessTest")
+ public void invokeAgainWithTamperedInternalKey() throws Exception {
+ // Set header
+ Map<String, String> headers = new HashMap<>();
+ headers.put("Internal-Key", tamperedInternalKey);
+ HttpResponse response = HttpsClientRequest.doGet(Utils.getServiceURLHttps("/v2/pet/2") , headers);
+
+ Assert.assertNotNull(response);
+ Assert.assertEquals(response.getResponseCode(), HttpStatus.SC_UNAUTHORIZED,"Response code mismatched");
+ Assert.assertTrue(response.getData().contains("Invalid Credentials"), "Error response message mismatch");
+ }
+
@Test(description = "Test to check the internal key auth validate expired token")
public void invokeExpiredInternalKeyTest() throws Exception {
| ['enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/JWTTokenPayloadInfo.java', 'enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java', 'integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/standalone/jwtValidator/InternalKeyTestCase.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 3,407,951 | 745,348 | 95,041 | 370 | 1,482 | 263 | 22 | 2 | 658 | 101 | 143 | 27 | 0 | 0 | 1970-01-01T00:27:05 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,134 | wso2/product-microgateway/2112/2114 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2114 | https://github.com/wso2/product-microgateway/pull/2112 | https://github.com/wso2/product-microgateway/pull/2112 | 1 | fixes | Incorrect API-UUID is published to analytics for Internal-Key invocation | ### Description:
The API UUID published to the analytics for internal Key invocation, is incorrect at the moment. Instead of the UUID assigned from APIM Publisher, it publishes a hash value.
This is due to authentication Context's API UUID is not being populated properly when InternalKey Authentication is used.
### Steps to reproduce:
### Affected Product Version:
<!-- Members can use Affected/*** labels -->
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 85e8639833f2f94877a2cc34f6df70dc257a751f | dff54eb2fc8defd4c7019c8da81a5eba9af4fc15 | https://github.com/wso2/product-microgateway/compare/85e8639833f2f94877a2cc34f6df70dc257a751f...dff54eb2fc8defd4c7019c8da81a5eba9af4fc15 | diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
index cdeaa3615..69c91686b 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
@@ -178,7 +178,8 @@ public class InternalAPIKeyAuthenticator implements Authenticator {
log.debug("Internal Key authentication successful.");
}
return FilterUtils.generateAuthenticationContext(tokenIdentifier, payload, api,
- requestContext.getMatchedAPI().getAPIConfig().getTier());
+ requestContext.getMatchedAPI().getAPIConfig().getTier(),
+ requestContext.getMatchedAPI().getAPIConfig().getUuid());
} else {
CacheProvider.getGatewayInternalKeyDataCache().invalidate(payload.getJWTID());
CacheProvider.getInvalidGatewayInternalKeyCache().put(payload.getJWTID(), "carbon.super");
diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
index e0b67ec6d..4ba7378a2 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
@@ -202,7 +202,10 @@ public class FilterUtils {
authContext.setStopOnQuotaReach(true);
authContext.setConsumerKey(null);
authContext.setCallerToken(null);
-
+ String apiUUID = requestContext.getMatchedAPI().getAPIConfig().getUuid();
+ if (!StringUtils.isEmpty(apiUUID)) {
+ authContext.setApiUUID(apiUUID);
+ }
return authContext;
}
@@ -293,7 +296,8 @@ public class FilterUtils {
* @throws java.text.ParseException
*/
public static AuthenticationContext generateAuthenticationContext(String tokenIdentifier, JWTClaimsSet payload,
- JSONObject api, String apiLevelPolicy)
+ JSONObject api, String apiLevelPolicy,
+ String apiUUID)
throws java.text.ParseException {
AuthenticationContext authContext = new AuthenticationContext();
@@ -305,7 +309,6 @@ public class FilterUtils {
} else {
authContext.setKeyType(APIConstants.API_KEY_TYPE_PRODUCTION);
}
-
authContext.setApiTier(apiLevelPolicy);
if (api != null) {
authContext.setTier(APIConstants.UNLIMITED_TIER);
@@ -313,6 +316,9 @@ public class FilterUtils {
authContext.setApiPublisher(api.getAsString(APIConstants.JwtTokenConstants.API_PUBLISHER));
}
+ if (!StringUtils.isEmpty(apiUUID)) {
+ authContext.setApiUUID(apiUUID);
+ }
authContext.setApplicationName(APIConstants.JwtTokenConstants.INTERNAL_KEY_APP_NAME);
authContext.setApplicationId(UUID.nameUUIDFromBytes(APIConstants.JwtTokenConstants.INTERNAL_KEY_APP_NAME.
getBytes(StandardCharsets.UTF_8)).toString()); | ['enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java', 'enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,406,704 | 745,138 | 95,026 | 370 | 858 | 128 | 15 | 2 | 748 | 108 | 151 | 26 | 0 | 0 | 1970-01-01T00:27:05 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,135 | wso2/product-microgateway/2062/2061 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2061 | https://github.com/wso2/product-microgateway/pull/2062 | https://github.com/wso2/product-microgateway/pull/2062 | 1 | fix | Envoy access log entry should be tracked in enforcer as well | ### Description:
In envoy we can print the random UUID for each request in access logs using the parameter `%REQ(X-REQUEST-ID)%` .
This is useful to track the request. We should use the same tracking ID in the enforcer side access logs as well.
### Steps to reproduce:
Currently enforcer prints id attribute of the http request(https://www.envoyproxy.io/docs/envoy/latest/api-v3/service/auth/v3/attribute_context.proto#service-auth-v3-attributecontext-httprequest), which is different from the UUID printed by X-REQUEST-ID in envoy access log.
### Affected Product Version:
0.9.0
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| aaf9992948b69f4295ea857d8efb676e5c72a68c | 69a8252f445cb8ac1da0c0c799f0ead5089bcb18 | https://github.com/wso2/product-microgateway/compare/aaf9992948b69f4295ea857d8efb676e5c72a68c...69a8252f445cb8ac1da0c0c799f0ead5089bcb18 | diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/HttpConstants.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/HttpConstants.java
index 300f10bcd..898312a2b 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/HttpConstants.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/HttpConstants.java
@@ -25,4 +25,5 @@ public class HttpConstants {
public static final int NO_CONTENT_STATUS_CODE = 204;
public static final String OPTIONS = "OPTIONS";
public static final String ALLOW_HEADER = "allow";
+ public static final String X_REQUEST_ID_HEADER = "x-request-id";
}
diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java
index c70fa9afa..6c2e17ae4 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java
@@ -48,7 +48,9 @@ public class ExtAuthService extends AuthorizationGrpc.AuthorizationImplBase {
@Override
public void check(CheckRequest request, StreamObserver<CheckResponse> responseObserver) {
- ThreadContext.put(APIConstants.LOG_TRACE_ID, request.getAttributes().getRequest().getHttp().getId());
+ ThreadContext.put(APIConstants.LOG_TRACE_ID, request.getAttributes().getRequest().getHttp()
+ .getHeadersOrDefault(HttpConstants.X_REQUEST_ID_HEADER,
+ request.getAttributes().getRequest().getHttp().getId()));
ResponseObject responseObject = requestHandler.process(request);
CheckResponse response = buildResponse(request, responseObject);
responseObserver.onNext(response);
diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/interceptors/AccessLogInterceptor.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/interceptors/AccessLogInterceptor.java
index cdbbf9267..dcdb43141 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/interceptors/AccessLogInterceptor.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/interceptors/AccessLogInterceptor.java
@@ -30,6 +30,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.wso2.choreo.connect.discovery.service.websocket.WebSocketFrameRequest;
import org.wso2.choreo.connect.discovery.service.websocket.WebSocketFrameResponse;
+import org.wso2.choreo.connect.enforcer.constants.HttpConstants;
import org.wso2.choreo.connect.enforcer.websocket.MetadataConstants;
/**
@@ -50,7 +51,12 @@ public class AccessLogInterceptor implements ServerInterceptor {
if (message instanceof CheckRequest) {
CheckRequest checkRequest = (CheckRequest) message;
enforcerServerCall.setStartTime(System.currentTimeMillis());
- enforcerServerCall.setTraceId(checkRequest.getAttributes().getRequest().getHttp().getId());
+ String requestId = checkRequest.getAttributes().getRequest().getHttp().getId();
+ // x-request-id equals to envoy access log entries "%REQ(X-REQUEST-ID)%". This allows
+ // to correlate the same request in Router and the Enforcer. If this header is not coming then
+ // we set the http request Id property as the default value.
+ enforcerServerCall.setTraceId(checkRequest.getAttributes().getRequest().getHttp()
+ .getHeadersOrDefault(HttpConstants.X_REQUEST_ID_HEADER, requestId));
super.onMessage(message);
} else if (message instanceof WebSocketFrameRequest) {
WebSocketFrameRequest webSocketFrameRequest = (WebSocketFrameRequest) message; | ['enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java', 'enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/interceptors/AccessLogInterceptor.java', 'enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/HttpConstants.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 3,439,811 | 752,757 | 96,007 | 373 | 1,249 | 220 | 13 | 3 | 915 | 118 | 205 | 26 | 1 | 0 | 1970-01-01T00:27:03 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,136 | wso2/product-microgateway/1978/1977 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/1977 | https://github.com/wso2/product-microgateway/pull/1978 | https://github.com/wso2/product-microgateway/pull/1978 | 1 | fixes | When Denied Policies and Analytics Both are enabled, enforcer results in Null Pointer Error | ### Description:
$subject.
### Steps to reproduce:
Enable analytics and start the microgateway.
Enable denied policy.
### Affected Product Version:
choreo-connect-0.9.0-beta
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 02834babd0fe3dc012e6a9946a3519a72f5929d3 | 0538f9dfee7ca15740272d3300f448a611c229c1 | https://github.com/wso2/product-microgateway/compare/02834babd0fe3dc012e6a9946a3519a72f5929d3...0538f9dfee7ca15740272d3300f448a611c229c1 | diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
index 87b3dd2b5..f7c1c5f49 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
@@ -76,6 +76,14 @@ public class ChoreoFaultAnalyticsProvider implements AnalyticsDataProvider {
switch (statusCode) {
case 401:
case 403:
+ // For Denied policies, the status code remains 403, but it is categorized
+ // under throttling
+ if (requestContext.getProperties().containsKey(APIConstants.MessageFormat.ERROR_CODE)) {
+ if (AnalyticsConstants.BLOCKED_ERROR_CODE == Integer.parseInt(requestContext.getProperties()
+ .get(APIConstants.MessageFormat.ERROR_CODE).toString())) {
+ return FaultCategory.THROTTLED;
+ }
+ }
return FaultCategory.AUTH;
case 429:
return FaultCategory.THROTTLED; | ['enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,444,229 | 753,961 | 96,111 | 373 | 575 | 84 | 8 | 1 | 510 | 65 | 114 | 26 | 0 | 0 | 1970-01-01T00:26:59 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,137 | wso2/product-microgateway/1976/1958 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/1958 | https://github.com/wso2/product-microgateway/pull/1976 | https://github.com/wso2/product-microgateway/pull/1976 | 1 | fixes | Analytics list down API list in the analytics tenant drop down | ### Description:
Create multiple APIS for a tenant and invoke them. Each API will list as separate tenant in the analytics drop down.
### Steps to reproduce:
1. create a tenant domain abc.com
2. Create 2 apis with context /foo and /bar
3. Then invoke the APIs and go and view analytics
4. Under the tenant drop down it will list below 2 tenants, which are actually a 2 APIs of a single tenant
```
1. /t/abc.com/foo
2. /t/abc.com/bar
```
### Affected Product Version:
Post 0.9.0-beta
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| ab255b91621fa7e66ffcdb71475e58e04149c7f2 | e64af966434d18f9a29a04609306a07c2c9b74c4 | https://github.com/wso2/product-microgateway/compare/ab255b91621fa7e66ffcdb71475e58e04149c7f2...e64af966434d18f9a29a04609306a07c2c9b74c4 | diff --git a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
index fe9ca3f28..87b3dd2b5 100644
--- a/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
+++ b/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
@@ -96,10 +96,10 @@ public class ChoreoFaultAnalyticsProvider implements AnalyticsDataProvider {
api.setApiType(requestContext.getMatchedAPI().getAPIConfig().getApiType());
api.setApiName(requestContext.getMatchedAPI().getAPIConfig().getName());
api.setApiVersion(requestContext.getMatchedAPI().getAPIConfig().getVersion());
- api.setApiCreatorTenantDomain(FilterUtils.getTenantDomainFromRequestURL(
- requestContext.getMatchedAPI().getAPIConfig().getBasePath()) == null
- ? APIConstants.SUPER_TENANT_DOMAIN_NAME
- : requestContext.getMatchedAPI().getAPIConfig().getBasePath());
+ String tenantDomain = FilterUtils.getTenantDomainFromRequestURL(
+ requestContext.getMatchedAPI().getAPIConfig().getBasePath());
+ api.setApiCreatorTenantDomain(
+ tenantDomain == null ? APIConstants.SUPER_TENANT_DOMAIN_NAME : tenantDomain);
api.setOrganizationId(requestContext.getMatchedAPI().getAPIConfig().getOrganizationId());
return api;
} | ['enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,444,247 | 753,968 | 96,111 | 373 | 593 | 121 | 8 | 1 | 826 | 130 | 204 | 33 | 0 | 1 | 1970-01-01T00:26:59 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,129 | wso2/product-microgateway/2323/2322 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2322 | https://github.com/wso2/product-microgateway/pull/2323 | https://github.com/wso2/product-microgateway/pull/2323 | 1 | fixes | Class cast exception in backend JWT for organizations claim | ### Description:
The following error can be observed when the token has the **organizations** claim and backend JWT generation is enabled.
```
[2021-10-25 11:35:26,350][363acd6ee699def3feb2cd9e722812c1] ERROR - {org.wso2.choreo.connect.enforcer.commons.model.ParameterResolver} - PathTemplate and RawPath is mismatched.
Oct 25, 2021 11:35:26 AM io.grpc.internal.SerializingExecutor run
SEVERE: Exception while executing runnable io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed@1fbab702
java.lang.ClassCastException: class net.minidev.json.JSONArray cannot be cast to class [Ljava.lang.String; (net.minidev.json.JSONArray is in unnamed module of loader 'app'; [Ljava.lang.String; is in module java.base of loader 'bootstrap')
at org.wso2.choreo.connect.enforcer.util.FilterUtils.constructJWTContent(FilterUtils.java:356)
at org.wso2.choreo.connect.enforcer.util.FilterUtils.generateJWTInfoDto(FilterUtils.java:342)
at org.wso2.choreo.connect.enforcer.security.jwt.JWTAuthenticator.authenticate(JWTAuthenticator.java:270)
at org.wso2.choreo.connect.enforcer.security.AuthFilter.authenticate(AuthFilter.java:145)
at org.wso2.choreo.connect.enforcer.security.AuthFilter.handleRequest(AuthFilter.java:125)
at org.wso2.choreo.connect.enforcer.api.API.executeFilterChain(API.java:42)
at org.wso2.choreo.connect.enforcer.api.RestAPI.process(RestAPI.java:151)
at org.wso2.choreo.connect.enforcer.server.HttpRequestHandler.process(HttpRequestHandler.java:57)
at org.wso2.choreo.connect.enforcer.grpc.ExtAuthService.check(ExtAuthService.java:73)
at io.envoyproxy.envoy.service.auth.v3.AuthorizationGrpc$MethodHandlers.invoke(AuthorizationGrpc.java:245)
at io.grpc.stub.ServerCalls$UnaryServerCallHandler$UnaryServerCallListener.onHalfClose(ServerCalls.java:180)
at io.grpc.PartialForwardingServerCallListener.onHalfClose(PartialForwardingServerCallListener.java:35)
at io.grpc.ForwardingServerCallListener.onHalfClose(ForwardingServerCallListener.java:23)
at io.grpc.ForwardingServerCallListener$SimpleForwardingServerCallListener.onHalfClose(ForwardingServerCallListener.java:40)
at io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.halfClosed(ServerCallImpl.java:331)
at io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed.runInContext(ServerImpl.java:814)
at io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
at io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
at java.base/java.lang.Thread.run(Unknown Source)
```
### Steps to reproduce:
| 110c51d5c18c6ca6d718413e0c68befd8e3761e8 | cfa3e38e9be1cb504eac667222111f733cec2167 | https://github.com/wso2/product-microgateway/compare/110c51d5c18c6ca6d718413e0c68befd8e3761e8...cfa3e38e9be1cb504eac667222111f733cec2167 | diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
index 8ccd02250..57990879b 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
@@ -19,6 +19,7 @@
package org.wso2.choreo.connect.enforcer.util;
import com.nimbusds.jwt.JWTClaimsSet;
+import net.minidev.json.JSONArray;
import net.minidev.json.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.HttpClient;
@@ -353,7 +354,11 @@ public class FilterUtils {
jwtInfoDto.setSub(sub);
}
if (claims.get(JWTConstants.ORGANIZATIONS) != null) {
- String[] organizations = (String[]) claims.get(JWTConstants.ORGANIZATIONS);
+ JSONArray orgArray = (JSONArray) claims.get(JWTConstants.ORGANIZATIONS);
+ String[] organizations = new String[orgArray.size()];
+ for (int i = 0; i < orgArray.size(); i++) {
+ organizations[i] = orgArray.get(i).toString();
+ }
jwtInfoDto.setOrganizations(organizations);
}
} | ['enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,922,500 | 855,947 | 109,303 | 445 | 437 | 91 | 7 | 1 | 2,757 | 121 | 648 | 34 | 0 | 1 | 1970-01-01T00:27:15 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,117 | wso2/product-microgateway/2556/2561 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2561 | https://github.com/wso2/product-microgateway/pull/2556 | https://github.com/wso2/product-microgateway/pull/2556 | 1 | fixes | Unlimited API Tier does not apply after resource level tier is set once | ### Description:
$subject
### Steps to reproduce:
1. Create an API and assign resource level Tier and save.
2. Deploy the API and see if the resource level tier is applied.
3. Then change it to API Level and assign unlimited tier.
4. Deploy the API and see if the resource level tier is applied. (in this case, it would apply.)
### Affected Product Version:
<!-- Members can use Affected/*** labels -->
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 12a2e8aef063c31d077e51126dd86b8fff171678 | 4f7da1064b42b3726baf0a0b30544284a1159edd | https://github.com/wso2/product-microgateway/compare/12a2e8aef063c31d077e51126dd86b8fff171678...4f7da1064b42b3726baf0a0b30544284a1159edd | diff --git a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/APIConfig.java b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/APIConfig.java
index 32fe1df81..331d53b84 100644
--- a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/APIConfig.java
+++ b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/APIConfig.java
@@ -40,7 +40,7 @@ public class APIConfig {
private String uuid;
private Map<String, List<String>> apiSecurity = new HashMap<>();
- private String tier = "Unlimited";
+ private String tier;
private boolean disableSecurity = false;
private List<ResourceConfig> resources = new ArrayList<>();
@@ -203,7 +203,7 @@ public class APIConfig {
private String uuid;
private Map<String, SecuritySchemaConfig> securitySchemeDefinitions;
private Map<String, List<String>> apiSecurity = new HashMap<>();
- private String tier = "Unlimited";
+ private String tier;
private boolean disableSecurity = false;
private List<ResourceConfig> resources = new ArrayList<>();
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
index dde3ff0bf..2df3c35ef 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
@@ -228,7 +228,6 @@ public class InternalAPIKeyAuthenticator extends APIKeyHandler {
}
return FilterUtils.generateAuthenticationContext(tokenIdentifier, payload, api,
- requestContext.getMatchedAPI().getTier(),
requestContext.getMatchedAPI().getUuid(), internalKey);
} else {
log.error("Internal Key authentication failed. " + FilterUtils.getMaskedToken(splitToken[0]));
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
index 19f1a7abf..a006c434e 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
@@ -135,7 +135,7 @@ public class ThrottleFilter implements Filter {
String authorizedUser = FilterUtils.buildUsernameWithTenant(authContext.getUsername(), appTenant);
boolean isApiLevelTriggered = false;
- if (!StringUtils.isEmpty(apiTier) && !ThrottleConstants.UNLIMITED_TIER.equalsIgnoreCase(apiTier)) {
+ if (!StringUtils.isEmpty(api.getTier())) {
resourceThrottleKey = apiThrottleKey;
resourceTier = apiTier;
isApiLevelTriggered = true;
@@ -298,7 +298,9 @@ public class ThrottleFilter implements Filter {
tenantDomain = APIConstants.SUPER_TENANT_DOMAIN_NAME;
}
- if (!StringUtils.isEmpty(apiTier) && !ThrottleConstants.UNLIMITED_TIER.equals(apiTier)) {
+ // apiConfig instance will have the tier assigned only if openapi definition contains the
+ // extension
+ if (!StringUtils.isEmpty(api.getTier())) {
resourceTier = apiTier;
resourceKey = apiContext;
} else {
@@ -362,7 +364,7 @@ public class ThrottleFilter implements Filter {
}
private String getApiTier(APIConfig apiConfig) {
- if (!apiConfig.getTier().isBlank()) {
+ if (!StringUtils.isEmpty(apiConfig.getTier())) {
return apiConfig.getTier();
}
return ThrottleConstants.UNLIMITED_TIER;
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
index b5a97aae3..b03c3567e 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
@@ -300,13 +300,12 @@ public class FilterUtils {
* @param tokenIdentifier
* @param payload
* @param api
- * @param apiLevelPolicy
* @param rawToken Raw token used to authenticate the request
* @return
* @throws java.text.ParseException
*/
public static AuthenticationContext generateAuthenticationContext(String tokenIdentifier, JWTClaimsSet payload,
- JSONObject api, String apiLevelPolicy,
+ JSONObject api,
String apiUUID, String rawToken)
throws java.text.ParseException {
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java
index a4a292f1c..b0344c52c 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java
@@ -27,6 +27,7 @@ import org.wso2.choreo.connect.enforcer.commons.model.APIConfig;
import org.wso2.choreo.connect.enforcer.commons.model.AuthenticationContext;
import org.wso2.choreo.connect.enforcer.commons.model.RequestContext;
import org.wso2.choreo.connect.enforcer.constants.APIConstants;
+import org.wso2.choreo.connect.enforcer.throttle.ThrottleConstants;
import org.wso2.choreo.connect.enforcer.tracing.TracingConstants;
import org.wso2.choreo.connect.enforcer.tracing.TracingSpan;
import org.wso2.choreo.connect.enforcer.tracing.TracingTracer;
@@ -63,6 +64,8 @@ public class WebSocketMetaDataFilter implements Filter {
ThreadContext.get(APIConstants.LOG_TRACE_ID));
}
+ String apiTier = !requestContext.getMatchedAPI().getTier().isBlank()
+ ? requestContext.getMatchedAPI().getTier() : ThrottleConstants.UNLIMITED_TIER;
AuthenticationContext authenticationContext = requestContext.getAuthenticationContext();
requestContext.addMetadataToMap(MetadataConstants.GRPC_STREAM_ID, UUID.randomUUID().toString());
requestContext.addMetadataToMap(MetadataConstants.REQUEST_ID,
@@ -74,7 +77,7 @@ public class WebSocketMetaDataFilter implements Filter {
requestContext.addMetadataToMap(MetadataConstants.TIER,
getNullableStringValue(authenticationContext.getTier()));
requestContext.addMetadataToMap(MetadataConstants.API_TIER,
- getNullableStringValue(requestContext.getMatchedAPI().getTier()));
+ getNullableStringValue(apiTier));
requestContext.addMetadataToMap(MetadataConstants.CONTENT_AWARE_TIER_PRESENT,
getNullableStringValue(String.valueOf(authenticationContext.isContentAwareTierPresent())));
requestContext.addMetadataToMap(MetadataConstants.API_KEY, | ['enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java', 'enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/APIConfig.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 4,107,132 | 895,135 | 114,643 | 485 | 1,381 | 250 | 21 | 5 | 742 | 116 | 165 | 29 | 0 | 0 | 1970-01-01T00:27:18 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,121 | wso2/product-microgateway/2508/2509 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2509 | https://github.com/wso2/product-microgateway/pull/2508 | https://github.com/wso2/product-microgateway/pull/2508 | 1 | fixes | Path Parameters are not populated for some cases with wildcard templates | ### Description:
Path Parameters are not populated for a template like /pet/{petID/somestring/* when the rawURL is /pet/1/somestring.
In addition, following log is printed.
```
PathTemplate and RawPath is mismatched.
```
### Steps to reproduce:
### Affected Product Version:
choreo-connect-1.0.0-beta
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 892384d54d64ecfc305184146cc206b5228a36eb | 639af63973a7e42165a9c075c80698087ffe7d80 | https://github.com/wso2/product-microgateway/compare/892384d54d64ecfc305184146cc206b5228a36eb...639af63973a7e42165a9c075c80698087ffe7d80 | diff --git a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java
index 1ba41f305..b102c2fad 100644
--- a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java
+++ b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java
@@ -33,9 +33,11 @@ class ParameterResolver {
private static final Logger logger = LogManager.getLogger(ParameterResolver.class);
private final List<String> parameterNames = new ArrayList<>();
private final Pattern pattern;
+ private String pathTemplate;
public ParameterResolver(final String parameterTemplate) {
// This formatting is required since /foo and /foo/ are considered to be equal
+ this.pathTemplate = parameterTemplate;
String formattedPathParamTemplate = parameterTemplate.endsWith("/") ?
parameterTemplate.substring(0, parameterTemplate.length() - 1) : parameterTemplate;
final Matcher matcher = PARAMETER_PATTERN.matcher(formattedPathParamTemplate);
@@ -52,7 +54,7 @@ class ParameterResolver {
}
String regex = Pattern.quote(matcher.replaceAll("_____PARAM_____"))
.replace("_____PARAM_____", "\\\\E([^/]*)\\\\Q");
- regex = regex.endsWith("*\\\\E") ? regex.substring(0, regex.length() - 3) + "\\\\E(.*)" : regex;
+ regex = regex.endsWith("*\\\\E") ? regex.substring(0, regex.length() - 4) + "\\\\E($|([/]{1}(.*)))" : regex;
pattern = Pattern.compile(regex);
}
@@ -63,7 +65,7 @@ class ParameterResolver {
final Matcher matcher = pattern.matcher(formattedURI);
if (!matcher.matches()) {
// Unlikely to occur as this pair is already matched within router.
- logger.error("PathTemplate and RawPath is mismatched.");
+ logger.debug("PathTemplate: {} and RawPath: {} is mismatched.", pathTemplate, uriString);
return new HashMap<>();
}
final Map<String, String> map = new HashMap<>();
diff --git a/enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java b/enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java
index 6c9e4fe31..c338bdecf 100644
--- a/enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java
+++ b/enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java
@@ -72,8 +72,15 @@ public class RequestContextTest {
public void testPathParameterGenerationWithWildcard() {
testPathParamValues("/v2/pet/12/2/random/random2", "/v2", "/pet/{petId}/{imageId}/*",
"petId", "12");
+ testPathParamValues("/v2/pet/12/2/random/random2/random3", "/v2", "/pet/{petId}/{imageId}/*",
+ "petId", "12");
testPathParamValues("/v2/pet/12/image/2/random/random2", "/v2", "/pet/{petId}/image/{imageId}/*",
"imageId", "2");
+ testPathParamValues("/v2/pet/12/2/image", "/v2", "/pet/{petId}/{imageId}/image/*",
+ "petId", "12");
+ testPathParamValues("/v2/pet/pet-1/image/*", "/v2", "/pet/{imageId}/image/*",
+ "imageId", "pet-1");
+ testMismatchedPaths("/v2/pet/12/2/image123", "/v2", "/pet/{petId}/{imageId}/image/*");
}
@Test
@@ -107,4 +114,13 @@ public class RequestContextTest {
requestContext.getPathParameters().get(pathParamName), expectedValue);
}
+ private void testMismatchedPaths(String rawPath, String basePath, String pathTemplate) {
+ RequestContext.Builder builder = new RequestContext.Builder(rawPath);
+ builder.matchedAPI(new APIConfig.Builder("Petstore").basePath(basePath).build());
+ builder.pathTemplate(pathTemplate);
+ RequestContext requestContext = builder.build();
+ Assert.assertNotNull(requestContext.getPathParameters());
+ Assert.assertEquals(0, requestContext.getPathParameters().size());
+ }
+
} | ['enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java', 'enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 4,101,847 | 894,160 | 114,538 | 484 | 471 | 119 | 6 | 1 | 641 | 83 | 150 | 30 | 0 | 1 | 1970-01-01T00:27:18 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,120 | wso2/product-microgateway/2519/2520 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2520 | https://github.com/wso2/product-microgateway/pull/2519 | https://github.com/wso2/product-microgateway/pull/2519 | 1 | fixes | Websocket APIs cannot be invoked due to NPE. | ### Description:
Websocket APIs cannot be invoked due to NPE in enforcer.
### Steps to reproduce:
start choreo-connect-with-apim
Deploy websocketAPI
Invoke the API with a valid token obtained from devportal
### Affected Product Version:
choreo-connect-1.0.0-beta2-snapshot
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 892384d54d64ecfc305184146cc206b5228a36eb | 849d9fedfe64c8fa61f9fd03173130a33de9c248 | https://github.com/wso2/product-microgateway/compare/892384d54d64ecfc305184146cc206b5228a36eb...849d9fedfe64c8fa61f9fd03173130a33de9c248 | diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
index a93a71489..fbe71d153 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
@@ -234,7 +234,10 @@ public class AuthFilter implements Filter {
}
ResourceConfig resourceConfig = requestContext.getMatchedResourcePath();
- if (resourceConfig.getEndpoints().containsKey(keyType)) {
+ // In websockets case, the endpoints object becomes null. Hence it would result
+ // in a NPE, if it is not checked.
+ if (resourceConfig.getEndpoints() != null &&
+ resourceConfig.getEndpoints().containsKey(keyType)) {
EndpointCluster endpointCluster = resourceConfig.getEndpoints().get(keyType);
// Apply resource level retry headers | ['enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 4,101,847 | 894,160 | 114,538 | 484 | 324 | 69 | 5 | 1 | 612 | 79 | 140 | 28 | 0 | 0 | 1970-01-01T00:27:18 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,119 | wso2/product-microgateway/2524/2529 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2529 | https://github.com/wso2/product-microgateway/pull/2524 | https://github.com/wso2/product-microgateway/pull/2524 | 1 | fixes | Set Application ID instead of UUID for throttle event. | ### Description:
There are two IDs associated with application; an incremental ID and a UUID. It is required to use incremental ID rather than UUID, as it is the approach taken by the synapse gateway. This is required to have the deployment pattern where synapse gateway and choreo connect both runs connecting to a single TM instance.
### Steps to reproduce:
add log publisher for throttle request stream in apim (navigate to /carbon)
Enable global throttling in CC
Invoke.
### Affected Product Version:
choreo-connect-beta
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 25946350568fb934eb1776bc9667da454760510f | 4a4ed36c6673cb9c3f95f77579ebf22dfc0216e7 | https://github.com/wso2/product-microgateway/compare/25946350568fb934eb1776bc9667da454760510f...4a4ed36c6673cb9c3f95f77579ebf22dfc0216e7 | diff --git a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/AuthenticationContext.java b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/AuthenticationContext.java
index 514eed52e..c5a78403e 100644
--- a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/AuthenticationContext.java
+++ b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/AuthenticationContext.java
@@ -31,7 +31,8 @@ public class AuthenticationContext {
private String apiKey;
private String keyType;
private String callerToken;
- private String applicationId;
+ private int applicationId;
+ private String applicationUUID;
private String applicationName;
private String consumerKey;
private String subscriber;
@@ -51,9 +52,9 @@ public class AuthenticationContext {
public static final String UNKNOWN_VALUE = "__unknown__";
public AuthenticationContext() {
- this.applicationId = UNKNOWN_VALUE;
this.apiPublisher = UNKNOWN_VALUE;
- this.applicationId = UNKNOWN_VALUE;
+ this.applicationUUID = UNKNOWN_VALUE;
+ this.applicationId = -1;
this.applicationName = UNKNOWN_VALUE;
this.applicationTier = "Unlimited";
this.consumerKey = UNKNOWN_VALUE;
@@ -190,15 +191,15 @@ public class AuthenticationContext {
}
/**
- * Get the application UUID for the matched application.
+ * Get the application ID for the matched application.
*
* @return
*/
- public String getApplicationId() {
+ public int getApplicationId() {
return applicationId;
}
- public void setApplicationId(String applicationId) {
+ public void setApplicationId(int applicationId) {
this.applicationId = applicationId;
}
@@ -370,4 +371,12 @@ public class AuthenticationContext {
public void setRawToken(String rawToken) {
this.rawToken = rawToken;
}
+
+ public String getApplicationUUID() {
+ return applicationUUID;
+ }
+
+ public void setApplicationUUID(String applicationUUID) {
+ this.applicationUUID = applicationUUID;
+ }
}
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/AnalyticsFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/AnalyticsFilter.java
index a700334c2..376cfec06 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/AnalyticsFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/AnalyticsFilter.java
@@ -127,8 +127,8 @@ public class AnalyticsFilter {
// Default Value would be PRODUCTION
requestContext.addMetadataToMap(MetadataConstants.APP_KEY_TYPE_KEY,
authContext.getKeyType() == null ? APIConstants.API_KEY_TYPE_PRODUCTION : authContext.getKeyType());
- requestContext.addMetadataToMap(MetadataConstants.APP_ID_KEY,
- AnalyticsUtils.setDefaultIfNull(authContext.getApplicationId()));
+ requestContext.addMetadataToMap(MetadataConstants.APP_UUID_KEY,
+ AnalyticsUtils.setDefaultIfNull(authContext.getApplicationUUID()));
requestContext.addMetadataToMap(MetadataConstants.APP_NAME_KEY,
AnalyticsUtils.setDefaultIfNull(authContext.getApplicationName()));
requestContext.addMetadataToMap(MetadataConstants.APP_OWNER_KEY,
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoAnalyticsProvider.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoAnalyticsProvider.java
index 634187304..9c8a794a0 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoAnalyticsProvider.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoAnalyticsProvider.java
@@ -124,7 +124,7 @@ public class ChoreoAnalyticsProvider implements AnalyticsDataProvider {
application.setApplicationOwner(getValueAsString(fieldsMap, MetadataConstants.APP_OWNER_KEY));
application.setApplicationName(getValueAsString(fieldsMap, MetadataConstants.APP_NAME_KEY));
application.setKeyType(getValueAsString(fieldsMap, MetadataConstants.APP_KEY_TYPE_KEY));
- application.setApplicationId(getValueAsString(fieldsMap, MetadataConstants.APP_ID_KEY));
+ application.setApplicationId(getValueAsString(fieldsMap, MetadataConstants.APP_UUID_KEY));
return application;
}
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
index 21d80ad34..c0277e119 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java
@@ -59,7 +59,7 @@ public class ChoreoFaultAnalyticsProvider implements AnalyticsDataProvider {
@Override
public boolean isAnonymous() {
return requestContext.getAuthenticationContext() == null ||
- StringUtils.isEmpty(requestContext.getAuthenticationContext().getApplicationId());
+ StringUtils.isEmpty(requestContext.getAuthenticationContext().getApplicationUUID());
}
@Override
@@ -118,7 +118,7 @@ public class ChoreoFaultAnalyticsProvider implements AnalyticsDataProvider {
// Default Value would be PRODUCTION
application.setKeyType(
authContext.getKeyType() == null ? APIConstants.API_KEY_TYPE_PRODUCTION : authContext.getKeyType());
- application.setApplicationId(AnalyticsUtils.setDefaultIfNull(authContext.getApplicationId()));
+ application.setApplicationId(AnalyticsUtils.setDefaultIfNull(authContext.getApplicationUUID()));
application.setApplicationOwner(AnalyticsUtils.setDefaultIfNull(authContext.getSubscriber()));
application.setApplicationName(AnalyticsUtils.setDefaultIfNull(authContext.getApplicationName()));
return application;
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/MetadataConstants.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/MetadataConstants.java
index d6a5c69dc..d63b58f3a 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/MetadataConstants.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/MetadataConstants.java
@@ -34,6 +34,7 @@ public class MetadataConstants {
public static final String API_ORGANIZATION_ID = WSO2_METADATA_PREFIX + "api-organization-id";
public static final String APP_ID_KEY = WSO2_METADATA_PREFIX + "application-id";
+ public static final String APP_UUID_KEY = WSO2_METADATA_PREFIX + "application-uuid";
public static final String APP_KEY_TYPE_KEY = WSO2_METADATA_PREFIX + "application-key-type";
public static final String APP_NAME_KEY = WSO2_METADATA_PREFIX + "application-name";
public static final String APP_OWNER_KEY = WSO2_METADATA_PREFIX + "application-owner";
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/APIKeyValidationInfoDTO.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/APIKeyValidationInfoDTO.java
index 4930db085..455b29b3e 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/APIKeyValidationInfoDTO.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/APIKeyValidationInfoDTO.java
@@ -42,7 +42,7 @@ public class APIKeyValidationInfoDTO implements Serializable {
private String userType;
private String endUserToken;
private String endUserName;
- private String applicationId;
+ private int applicationId;
private String applicationName;
private String applicationTier;
//use this to pass key validation status
@@ -140,11 +140,11 @@ public class APIKeyValidationInfoDTO implements Serializable {
this.endUserName = endUserName;
}
- public String getApplicationId() {
+ public int getApplicationId() {
return applicationId;
}
- public void setApplicationId(String applicationId) {
+ public void setApplicationId(int applicationId) {
this.applicationId = applicationId;
}
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/KeyValidator.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/KeyValidator.java
index e44e95359..776e9c310 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/KeyValidator.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/KeyValidator.java
@@ -281,7 +281,8 @@ public class KeyValidator {
}
infoDTO.setTier(sub.getPolicyId());
infoDTO.setSubscriber(app.getSubName());
- infoDTO.setApplicationId(app.getUUID());
+ infoDTO.setApplicationId(app.getId());
+ infoDTO.setApplicationUUID(app.getUUID());
infoDTO.setApiName(api.getApiName());
infoDTO.setApiVersion(api.getApiVersion());
infoDTO.setApiPublisher(api.getApiProvider());
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java
index f71c6390c..746a2e049 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java
@@ -273,7 +273,9 @@ public class APIKeyAuthenticator extends APIKeyHandler {
validationInfoDTO.setType(APIConstants.API_KEY_TYPE_PRODUCTION);
}
if (app != null) {
- validationInfoDTO.setApplicationId(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_ID));
+ validationInfoDTO.setApplicationId(app.getAsNumber(APIConstants.JwtTokenConstants.APPLICATION_ID)
+ .intValue());
+ validationInfoDTO.setApplicationUUID(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_UUID));
validationInfoDTO.setApplicationName(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_NAME));
validationInfoDTO.setApplicationTier(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_TIER));
validationInfoDTO.setSubscriber(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_OWNER));
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java
index 8d92efd37..a5263c0eb 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java
@@ -324,7 +324,8 @@ public class JWTAuthenticator implements Authenticator {
String kmReference) {
String applicationRef = APIConstants.ANONYMOUS_PREFIX + kmReference;
apiKeyValidationInfoDTO.setApplicationName(applicationRef);
- apiKeyValidationInfoDTO.setApplicationId(
+ apiKeyValidationInfoDTO.setApplicationId(-1);
+ apiKeyValidationInfoDTO.setApplicationUUID(
UUID.nameUUIDFromBytes(
applicationRef.getBytes(StandardCharsets.UTF_8)).toString());
apiKeyValidationInfoDTO.setApplicationTier(APIConstants.UNLIMITED_TIER);
@@ -453,7 +454,8 @@ public class JWTAuthenticator implements Authenticator {
JSONObject app = payload.getJSONObjectClaim(APIConstants.JwtTokenConstants.APPLICATION);
if (app != null) {
validationInfo.setApplicationUUID(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_UUID));
- validationInfo.setApplicationId(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_ID));
+ validationInfo.setApplicationId(app.getAsNumber(APIConstants.JwtTokenConstants.APPLICATION_ID)
+ .intValue());
validationInfo.setApplicationName(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_NAME));
validationInfo.setApplicationTier(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_TIER));
validationInfo.setSubscriber(app.getAsString(APIConstants.JwtTokenConstants.APPLICATION_OWNER));
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/server/WebSocketHandler.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/server/WebSocketHandler.java
index 5f46712cc..fc50ad820 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/server/WebSocketHandler.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/server/WebSocketHandler.java
@@ -18,6 +18,7 @@
package org.wso2.choreo.connect.enforcer.server;
import io.opentelemetry.context.Scope;
+import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.ThreadContext;
@@ -105,7 +106,11 @@ public class WebSocketHandler implements RequestHandler<WebSocketFrameRequest, W
String apiKey = extAuthMetadata.get(MetadataConstants.API_KEY);
String keyType = extAuthMetadata.get(MetadataConstants.KEY_TYPE);
String callerToken = extAuthMetadata.get(MetadataConstants.CALLER_TOKEN);
- String applicationId = extAuthMetadata.get(MetadataConstants.APP_ID);
+ int applicationId = -1;
+ if (!StringUtils.isEmpty(extAuthMetadata.get(MetadataConstants.APP_ID))) {
+ applicationId = Integer.parseInt(extAuthMetadata.get(MetadataConstants.APP_ID));
+ }
+
String applicationName = extAuthMetadata.get(MetadataConstants.APP_NAME);
String consumerKey = extAuthMetadata.get(MetadataConstants.CONSUMER_KEY);
String subscriber = extAuthMetadata.get(MetadataConstants.SUBSCRIBER);
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleDataHolder.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleDataHolder.java
index 81a3750f3..8652e7098 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleDataHolder.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleDataHolder.java
@@ -387,7 +387,7 @@ public class ThrottleDataHolder {
* @return throttle {@link Decision}
*/
public Decision isThrottledByCustomPolicy(String userID, String resourceKey, String apiContext, String apiVersion,
- String appTenant, String apiTenant, String appId, String clientIp) {
+ String appTenant, String apiTenant, int appId, String clientIp) {
Decision decision = new Decision();
if (keyTemplates.size() > 0) {
for (String key : keyTemplates.keySet()) {
@@ -397,7 +397,7 @@ public class ThrottleDataHolder {
key = key.replaceAll("\\\\$apiVersion", apiVersion);
key = key.replaceAll("\\\\$appTenant", appTenant);
key = key.replaceAll("\\\\$apiTenant", apiTenant);
- key = key.replaceAll("\\\\$appId", appId);
+ key = key.replaceAll("\\\\$appId", String.valueOf(appId));
if (clientIp != null) {
key = key.replaceAll("\\\\$clientIp", FilterUtils.ipToBigInteger(clientIp).toString());
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
index f7ca83c3e..19f1a7abf 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
@@ -122,7 +122,7 @@ public class ThrottleFilter implements Filter {
APIConfig api = reqContext.getMatchedAPI();
String apiContext = api.getBasePath();
String apiVersion = api.getVersion();
- String appId = authContext.getApplicationId();
+ int appId = authContext.getApplicationId();
String apiTier = getApiTier(api);
String apiThrottleKey = getApiThrottleKey(apiContext, apiVersion);
String resourceTier = getResourceTier(reqContext.getMatchedResourcePath());
@@ -202,7 +202,7 @@ public class ThrottleFilter implements Filter {
}
// Checking Application level throttling
- String appThrottleKey = appId + ':' + authorizedUser;
+ String appThrottleKey = appId + ":" + authorizedUser;
Decision appDecision = checkAppLevelThrottled(appThrottleKey, appTier);
if (appDecision.isThrottled()) {
log.debug("Setting application throttle out response");
@@ -307,11 +307,11 @@ public class ThrottleFilter implements Filter {
}
throttleEvent.put(ThrottleEventConstants.MESSAGE_ID, requestContext.getRequestID());
- throttleEvent.put(ThrottleEventConstants.APP_KEY, authContext.getApplicationId() + ':' + authorizedUser);
+ throttleEvent.put(ThrottleEventConstants.APP_KEY, authContext.getApplicationId() + ":" + authorizedUser);
throttleEvent.put(ThrottleEventConstants.APP_TIER, authContext.getApplicationTier());
throttleEvent.put(ThrottleEventConstants.API_KEY, apiContext);
throttleEvent.put(ThrottleEventConstants.API_TIER, apiTier);
- throttleEvent.put(ThrottleEventConstants.SUBSCRIPTION_KEY, authContext.getApplicationId() + ':' +
+ throttleEvent.put(ThrottleEventConstants.SUBSCRIPTION_KEY, authContext.getApplicationId() + ":" +
apiContext);
throttleEvent.put(ThrottleEventConstants.SUBSCRIPTION_TIER, authContext.getTier());
throttleEvent.put(ThrottleEventConstants.RESOURCE_KEY, resourceKey);
@@ -322,7 +322,7 @@ public class ThrottleFilter implements Filter {
throttleEvent.put(ThrottleEventConstants.API_VERSION, apiVersion);
throttleEvent.put(ThrottleEventConstants.APP_TENANT, authContext.getSubscriberTenantDomain());
throttleEvent.put(ThrottleEventConstants.API_TENANT, tenantDomain);
- throttleEvent.put(ThrottleEventConstants.APP_ID, authContext.getApplicationId());
+ throttleEvent.put(ThrottleEventConstants.APP_ID, String.valueOf(authContext.getApplicationId()));
throttleEvent.put(ThrottleEventConstants.API_NAME, apiName);
throttleEvent.put(ThrottleEventConstants.PROPERTIES, getProperties(requestContext).toString());
return throttleEvent;
@@ -346,8 +346,8 @@ public class ThrottleFilter implements Filter {
return apiThrottleKey;
}
- private String getSubscriptionThrottleKey(String appId, String apiContext, String apiVersion) {
- String subThrottleKey = appId + ':' + apiContext;
+ private String getSubscriptionThrottleKey(int appId, String apiContext, String apiVersion) {
+ String subThrottleKey = appId + ":" + apiContext;
if (!apiVersion.isBlank()) {
subThrottleKey += ':' + apiVersion;
}
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
index b463d006d..b5a97aae3 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java
@@ -200,7 +200,8 @@ public class FilterUtils {
}
// Setting end user as anonymous
authContext.setUsername(APIConstants.END_USER_ANONYMOUS);
- authContext.setApplicationId(clientIP);
+ // TODO: (VirajSalaka) clientIP for applicationUUID?
+ authContext.setApplicationUUID(clientIP);
authContext.setApplicationName(null);
authContext.setApplicationTier(APIConstants.UNLIMITED_TIER);
authContext.setSubscriber(APIConstants.END_USER_ANONYMOUS);
@@ -229,6 +230,7 @@ public class FilterUtils {
if (apiKeyValidationInfoDTO != null) {
authContext.setKeyType(apiKeyValidationInfoDTO.getType());
authContext.setApplicationId(apiKeyValidationInfoDTO.getApplicationId());
+ authContext.setApplicationUUID(apiKeyValidationInfoDTO.getApplicationUUID());
authContext.setApplicationName(apiKeyValidationInfoDTO.getApplicationName());
authContext.setApplicationTier(apiKeyValidationInfoDTO.getApplicationTier());
authContext.setSubscriber(apiKeyValidationInfoDTO.getSubscriber());
@@ -328,7 +330,7 @@ public class FilterUtils {
authContext.setApiUUID(apiUUID);
}
authContext.setApplicationName(APIConstants.JwtTokenConstants.INTERNAL_KEY_APP_NAME);
- authContext.setApplicationId(UUID.nameUUIDFromBytes(APIConstants.JwtTokenConstants.INTERNAL_KEY_APP_NAME.
+ authContext.setApplicationUUID(UUID.nameUUIDFromBytes(APIConstants.JwtTokenConstants.INTERNAL_KEY_APP_NAME.
getBytes(StandardCharsets.UTF_8)).toString());
authContext.setApplicationTier(APIConstants.UNLIMITED_TIER);
authContext.setSubscriber(APIConstants.JwtTokenConstants.INTERNAL_KEY_APP_NAME);
@@ -368,7 +370,7 @@ public class FilterUtils {
}
}
if (apiKeyValidationInfoDTO != null) {
- jwtInfoDto.setApplicationId(apiKeyValidationInfoDTO.getApplicationId());
+ jwtInfoDto.setApplicationId(apiKeyValidationInfoDTO.getApplicationUUID());
jwtInfoDto.setApplicationName(apiKeyValidationInfoDTO.getApplicationName());
jwtInfoDto.setApplicationTier(apiKeyValidationInfoDTO.getApplicationTier());
jwtInfoDto.setKeyType(apiKeyValidationInfoDTO.getType());
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/MetadataConstants.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/MetadataConstants.java
index 1766047a5..139724b49 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/MetadataConstants.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/MetadataConstants.java
@@ -30,6 +30,7 @@ public class MetadataConstants {
public static final String KEY_TYPE = "keyType";
public static final String CALLER_TOKEN = "callerToken";
public static final String APP_ID = "applicationId";
+ public static final String APP_UUID = "applicationUUID";
public static final String APP_NAME = "applicationName";
public static final String CONSUMER_KEY = "consumerKey";
public static final String SUBSCRIBER = "subscriber";
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java
index eb3641d56..a4a292f1c 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java
@@ -84,7 +84,10 @@ public class WebSocketMetaDataFilter implements Filter {
requestContext.addMetadataToMap(MetadataConstants.CALLER_TOKEN,
getNullableStringValue(authenticationContext.getCallerToken()));
requestContext.addMetadataToMap(MetadataConstants.APP_ID,
- getNullableStringValue(authenticationContext.getApplicationId()));
+ String.valueOf(authenticationContext.getApplicationId()));
+ // Unused but added to maintain the consistancy
+ requestContext.addMetadataToMap(MetadataConstants.APP_UUID,
+ String.valueOf(authenticationContext.getApplicationUUID()));
requestContext.addMetadataToMap(MetadataConstants.APP_NAME,
getNullableStringValue(authenticationContext.getApplicationName()));
requestContext.addMetadataToMap(MetadataConstants.CONSUMER_KEY,
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketThrottleFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketThrottleFilter.java
index c867bcd8c..763996124 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketThrottleFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketThrottleFilter.java
@@ -97,7 +97,7 @@ public class WebSocketThrottleFilter implements Filter {
APIConfig api = requestContext.getMatchedAPI();
String apiContext = api.getBasePath();
String apiVersion = api.getVersion();
- String appId = authContext.getApplicationId();
+ int appId = authContext.getApplicationId();
String apiTier = getApiTier(api);
String apiThrottleKey = getApiThrottleKey(apiContext, apiVersion);
String subTier = authContext.getTier();
@@ -164,7 +164,7 @@ public class WebSocketThrottleFilter implements Filter {
}
// Checking Application level throttling
- String appThrottleKey = appId + ':' + authorizedUser;
+ String appThrottleKey = appId + ":" + authorizedUser;
Decision appDecision = checkAppLevelThrottled(appThrottleKey, appTier);
if (appDecision.isThrottled()) {
log.debug("Setting application throttle out response");
@@ -196,7 +196,7 @@ public class WebSocketThrottleFilter implements Filter {
return apiThrottleKey;
}
- private String getSubscriptionThrottleKey(String appId, String apiContext, String apiVersion) {
+ private String getSubscriptionThrottleKey(int appId, String apiContext, String apiVersion) {
String subThrottleKey = appId + ':' + apiContext;
if (!apiVersion.isBlank()) {
subThrottleKey += ':' + apiVersion;
@@ -237,11 +237,11 @@ public class WebSocketThrottleFilter implements Filter {
}
throttleEvent.put(ThrottleEventConstants.MESSAGE_ID, requestContext.getRequestID());
- throttleEvent.put(ThrottleEventConstants.APP_KEY, authContext.getApplicationId() + ':' + authorizedUser);
+ throttleEvent.put(ThrottleEventConstants.APP_KEY, authContext.getApplicationId() + ":" + authorizedUser);
throttleEvent.put(ThrottleEventConstants.APP_TIER, authContext.getApplicationTier());
throttleEvent.put(ThrottleEventConstants.API_KEY, apiContext);
throttleEvent.put(ThrottleEventConstants.API_TIER, apiTier);
- throttleEvent.put(ThrottleEventConstants.SUBSCRIPTION_KEY, authContext.getApplicationId() + ':' +
+ throttleEvent.put(ThrottleEventConstants.SUBSCRIPTION_KEY, authContext.getApplicationId() + ":" +
apiContext);
throttleEvent.put(ThrottleEventConstants.SUBSCRIPTION_TIER, authContext.getTier());
// TODO: (Praminda) should publish with tenant domain?
@@ -250,7 +250,7 @@ public class WebSocketThrottleFilter implements Filter {
throttleEvent.put(ThrottleEventConstants.API_VERSION, apiVersion);
throttleEvent.put(ThrottleEventConstants.APP_TENANT, authContext.getSubscriberTenantDomain());
throttleEvent.put(ThrottleEventConstants.API_TENANT, tenantDomain);
- throttleEvent.put(ThrottleEventConstants.APP_ID, authContext.getApplicationId());
+ throttleEvent.put(ThrottleEventConstants.APP_ID, String.valueOf(authContext.getApplicationId()));
throttleEvent.put(ThrottleEventConstants.API_NAME, apiName);
throttleEvent.put(ThrottleEventConstants.PROPERTIES, getProperties(requestContext).toString());
return throttleEvent; | ['enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java', 'enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/AuthenticationContext.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/MetadataConstants.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleDataHolder.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoFaultAnalyticsProvider.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketThrottleFilter.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/AnalyticsFilter.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/analytics/ChoreoAnalyticsProvider.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/FilterUtils.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/KeyValidator.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/MetadataConstants.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/dto/APIKeyValidationInfoDTO.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/server/WebSocketHandler.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/websocket/WebSocketMetaDataFilter.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java'] | {'.java': 16} | 16 | 16 | 0 | 0 | 16 | 4,103,965 | 894,489 | 114,570 | 484 | 7,140 | 1,298 | 102 | 16 | 863 | 128 | 181 | 28 | 0 | 0 | 1970-01-01T00:27:18 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 
37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,118 | wso2/product-microgateway/2538/2537 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2537 | https://github.com/wso2/product-microgateway/pull/2538 | https://github.com/wso2/product-microgateway/pull/2538 | 1 | fixes | NPE when invoking Websocket APIs | ### Description:
<!-- Describe the issue -->
$subject. Below log can be seen in the enforcer logs.
```
enforcer_1 | java.lang.NullPointerException
enforcer_1 | at org.wso2.choreo.connect.enforcer.commons.model.RequestContext$Builder.populatePathParameters(RequestContext.java:454)
enforcer_1 | at org.wso2.choreo.connect.enforcer.commons.model.RequestContext$Builder.build(RequestContext.java:421)
enforcer_1 | at org.wso2.choreo.connect.enforcer.server.WebSocketHandler.buildRequestContext(WebSocketHandler.java:157)
enforcer_1 | at org.wso2.choreo.connect.enforcer.server.WebSocketHandler.process(WebSocketHandler.java:80)
enforcer_1 | at org.wso2.choreo.connect.enforcer.websocket.WebSocketResponseObserver.lambda$onNext$0(WebSocketResponseObserver.java:77)
enforcer_1 | at java.base/java.lang.Iterable.forEach(Unknown Source)
enforcer_1 | at org.wso2.choreo.connect.enforcer.websocket.WebSocketResponseObserver.onNext(WebSocketResponseObserver.java:70)
enforcer_1 | at org.wso2.choreo.connect.enforcer.websocket.WebSocketResponseObserver.onNext(WebSocketResponseObserver.java:41)
enforcer_1 | at io.grpc.stub.ServerCalls$StreamingServerCallHandler$StreamingServerCallListener.onMessage(ServerCalls.java:249)
enforcer_1 | at io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.messagesAvailableInternal(ServerCallImpl.java:309)
enforcer_1 | at io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.messagesAvailable(ServerCallImpl.java:292)
enforcer_1 | at io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1MessagesAvailable.runInContext(ServerImpl.java:782)
enforcer_1 | at io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
enforcer_1 | at io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
enforcer_1 | at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
enforcer_1 | at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
enforcer_1 | at java.base/java.lang.Thread.run(Unknown Source)
```
### Steps to reproduce:
### Affected Product Version:
<!-- Members can use Affected/*** labels -->
1.0.0-rc1-snapshot
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s): | ee416e1dc8f1988c50ead87696982204de5e4202 | e58f010246b8a646164e468f0f9da09a3ec76d12 | https://github.com/wso2/product-microgateway/compare/ee416e1dc8f1988c50ead87696982204de5e4202...e58f010246b8a646164e468f0f9da09a3ec76d12 | diff --git a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java
index 2a3eed21f..38f5b63e0 100644
--- a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java
+++ b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java
@@ -20,6 +20,8 @@ package org.wso2.choreo.connect.enforcer.commons.model;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
@@ -33,6 +35,7 @@ import java.util.TreeMap;
* through out the complete request flow through the gateway enforcer.
*/
public class RequestContext {
+ private static final Logger logger = LogManager.getLogger(RequestContext.class);
//constants
public static final String CLUSTER_HEADER = "x-wso2-cluster-header";
@@ -447,6 +450,11 @@ public class RequestContext {
*/
private Map<String, String> populatePathParameters(String basePath, String resourceTemplate,
String rawPath) {
+ if (resourceTemplate == null || rawPath == null) {
+ logger.debug("Skip populating the path parameters. template: {}, rawPath: {}", resourceTemplate,
+ rawPath);
+ return null;
+ }
// Format the basePath and resourcePath to maintain consistency
String formattedBasePath = basePath.startsWith("/") ? basePath : "/" + basePath;
formattedBasePath = formattedBasePath.endsWith("/") ?
@@ -457,8 +465,7 @@ public class RequestContext {
String formattedRawPath = rawPath.split("\\\\?")[0];
final ParameterResolver parameterResolver = new ParameterResolver
(formattedBasePath + formattedResourcePathTemplate);
- final Map<String, String> resultMap = parameterResolver.parametersByName(formattedRawPath);
- return resultMap;
+ return parameterResolver.parametersByName(formattedRawPath);
}
}
} | ['enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 4,105,197 | 894,709 | 114,594 | 484 | 639 | 110 | 11 | 1 | 2,271 | 122 | 560 | 32 | 0 | 1 | 1970-01-01T00:27:18 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,116 | wso2/product-microgateway/2563/2572 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2572 | https://github.com/wso2/product-microgateway/pull/2563 | https://github.com/wso2/product-microgateway/pull/2563 | 1 | fixes | Avoid Unwanted headers/query parameters being forwarded to the backend. | ### Description:
There can be headers which are not supposed to forwarded to backend as well as in the throttle publish event.
### Steps to reproduce:
Invoke API with APIKey.
And check the recieved set of headers in the backend.
### Affected Product Version:
choreo-connect-1.0.0-rc1
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| 90791d02ec7f352e55b10eb6528b4262c9fa5540 | a8b4f9d20717faa7b1dabb4023ac529cb7f3c484 | https://github.com/wso2/product-microgateway/compare/90791d02ec7f352e55b10eb6528b4262c9fa5540...a8b4f9d20717faa7b1dabb4023ac529cb7f3c484 | diff --git a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java
index 38f5b63e0..fa64ad2e5 100644
--- a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java
+++ b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java
@@ -62,6 +62,10 @@ public class RequestContext {
private WebSocketFrameContext webSocketFrameContext;
private Map<String, String> queryParameters;
private Map<String, String> pathParameters;
+ private ArrayList<String> queryParamsToRemove;
+ // This is used to keep protected headers like authorization header. The protected headers will not be
+ // sent to the Traffic Manager when header based rate limiting is enabled.
+ private ArrayList<String> protectedHeaders;
// Request Timestamp is required for analytics
private long requestTimeStamp;
@@ -312,6 +316,28 @@ public class RequestContext {
return webSocketFrameContext;
}
+ /**
+ * If there is a set of query parameters needs to be removed from the outbound request, those parameters should
+ * be added to the arrayList here.
+ *
+ * @return query parameters which are supposed to be removed.
+ */
+ public ArrayList<String> getQueryParamsToRemove() {
+ return queryParamsToRemove;
+ }
+
+ /**
+ * If there is a set of headers needs to be removed from the throttle publishing event, those headers should
+ * be added to the arrayList here.
+ *
+ * Ex. Authorization Header
+ *
+ * @return header names which are not supposed to be published to the traffic manager.
+ */
+ public ArrayList<String> getProtectedHeaders() {
+ return protectedHeaders;
+ }
+
/**
* Implements builder pattern to build an {@link RequestContext} object.
*/
@@ -412,6 +438,8 @@ public class RequestContext {
requestContext.clientIp = this.clientIp;
requestContext.addHeaders = new HashMap<>();
requestContext.removeHeaders = new ArrayList<>();
+ requestContext.queryParamsToRemove = new ArrayList<>();
+ requestContext.protectedHeaders = new ArrayList<>();
String[] queryParts = this.requestPath.split("\\\\?");
String queryPrams = queryParts.length > 1 ? queryParts[1] : "";
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/ResponseObject.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/ResponseObject.java
index b4d34e3c6..36e0e588d 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/ResponseObject.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/ResponseObject.java
@@ -34,6 +34,9 @@ public class ResponseObject {
private ArrayList<String> removeHeaderMap = new ArrayList<>();
private Map<String, String> metaDataMap;
private boolean isDirectResponse = false;
+ private ArrayList<String> queryParamsToRemove = new ArrayList<>();
+ private Map<String, String> queryParams;
+ private String requestPath;
public ArrayList<String> getRemoveHeaderMap() {
return removeHeaderMap;
@@ -110,4 +113,28 @@ public class ResponseObject {
public void setMetaDataMap(Map<String, String> metaDataMap) {
this.metaDataMap = metaDataMap;
}
+
+ public ArrayList<String> getQueryParamsToRemove() {
+ return queryParamsToRemove;
+ }
+
+ public void setQueryParamsToRemove(ArrayList<String> queryParamsToRemove) {
+ this.queryParamsToRemove = queryParamsToRemove;
+ }
+
+ public Map<String, String> getQueryParamMap() {
+ return queryParams;
+ }
+
+ public void setQueryParamMap(Map<String, String> queryParams) {
+ this.queryParams = queryParams;
+ }
+
+ public String getRequestPath() {
+ return requestPath;
+ }
+
+ public void setRequestPath(String requestPath) {
+ this.requestPath = requestPath;
+ }
}
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/RestAPI.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/RestAPI.java
index b89f6cbca..d5f998815 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/RestAPI.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/RestAPI.java
@@ -157,17 +157,15 @@ public class RestAPI implements API {
@Override
public ResponseObject process(RequestContext requestContext) {
ResponseObject responseObject = new ResponseObject(requestContext.getRequestID());
+ responseObject.setRequestPath(requestContext.getRequestPath());
boolean analyticsEnabled = ConfigHolder.getInstance().getConfig().getAnalyticsConfig().isEnabled();
- // Process to-be-removed headers
- AuthHeaderDto authHeader = ConfigHolder.getInstance().getConfig().getAuthHeader();
- if (!authHeader.isEnableOutboundAuthHeader()) {
- String authHeaderName = FilterUtils.getAuthHeaderName(requestContext);
- requestContext.getRemoveHeaders().add(authHeaderName);
- }
+ populateRemoveAndProtectedHeaders(requestContext);
if (executeFilterChain(requestContext)) {
responseObject.setRemoveHeaderMap(requestContext.getRemoveHeaders());
+ responseObject.setQueryParamsToRemove(requestContext.getQueryParamsToRemove());
+ responseObject.setQueryParamMap(requestContext.getQueryParameters());
responseObject.setStatusCode(APIConstants.StatusCodes.OK.getCode());
if (requestContext.getAddHeaders() != null && requestContext.getAddHeaders().size() > 0) {
responseObject.setHeaderMap(requestContext.getAddHeaders());
@@ -288,4 +286,43 @@ public class RestAPI implements API {
}
}
}
+
+ private void populateRemoveAndProtectedHeaders(RequestContext requestContext) {
+ Map<String, SecuritySchemaConfig> securitySchemeDefinitions =
+ requestContext.getMatchedAPI().getSecuritySchemeDefinitions();
+ // API key headers are considered to be protected headers, such that the header would not be sent
+ // to backend and traffic manager.
+ // This would prevent leaking credentials, even if user is invoking unsecured resource with some
+ // credentials.
+ for (Map.Entry<String, SecuritySchemaConfig> entry : securitySchemeDefinitions.entrySet()) {
+ SecuritySchemaConfig schema = entry.getValue();
+ if (APIConstants.SWAGGER_API_KEY_AUTH_TYPE_NAME.equalsIgnoreCase(schema.getType())) {
+ if (APIConstants.SWAGGER_API_KEY_IN_HEADER.equals(schema.getIn())) {
+ requestContext.getProtectedHeaders().add(schema.getName());
+ requestContext.getRemoveHeaders().add(schema.getName());
+ continue;
+ }
+ if (APIConstants.SWAGGER_API_KEY_IN_QUERY.equals(schema.getIn())) {
+ requestContext.getQueryParamsToRemove().add(schema.getName());
+ }
+ }
+ }
+
+ // Internal-Key credential is considered to be protected headers, such that the header would not be sent
+ // to backend and traffic manager.
+ String internalKeyHeader = ConfigHolder.getInstance().getConfig().getAuthHeader()
+ .getTestConsoleHeaderName().toLowerCase();
+ requestContext.getRemoveHeaders().add(internalKeyHeader);
+ // Avoid internal key being published to the Traffic Manager
+ requestContext.getProtectedHeaders().add(internalKeyHeader);
+
+ // Remove Authorization Header
+ AuthHeaderDto authHeader = ConfigHolder.getInstance().getConfig().getAuthHeader();
+ String authHeaderName = FilterUtils.getAuthHeaderName(requestContext);
+ if (!authHeader.isEnableOutboundAuthHeader()) {
+ requestContext.getRemoveHeaders().add(authHeaderName);
+ }
+ // Authorization Header should not be included in the throttle publishing event.
+ requestContext.getProtectedHeaders().add(authHeaderName);
+ }
}
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/APIConstants.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/APIConstants.java
index 80bd4b0ea..ea792da28 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/APIConstants.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/APIConstants.java
@@ -86,6 +86,7 @@ public class APIConstants {
public static final String APPLICATION_JSON = "application/json";
public static final String API_TRACE_KEY = "X-TRACE-KEY";
public static final String X_FORWARDED_FOR = "x-forwarded-for";
+ public static final String PATH_HEADER = ":path";
public static final String LOG_TRACE_ID = "traceId";
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java
index 594749436..6225ef6e8 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java
@@ -47,6 +47,9 @@ import org.wso2.choreo.connect.enforcer.tracing.TracingSpan;
import org.wso2.choreo.connect.enforcer.tracing.TracingTracer;
import org.wso2.choreo.connect.enforcer.tracing.Utils;
+import java.util.List;
+import java.util.Map;
+
/**
* This is the gRPC server written to match with the envoy ext-authz filter proto file. Envoy proxy call this service.
* This is the entry point to the filter chain process for a request.
@@ -140,6 +143,21 @@ public class ExtAuthService extends AuthorizationGrpc.AuthorizationImplBase {
.build();
} else {
OkHttpResponse.Builder okResponseBuilder = OkHttpResponse.newBuilder();
+
+ // If the user is sending the APIKey credentials within query parameters, those query parameters should
+ // not be sent to the backend. Hence, the :path header needs to be constructed again removing the apiKey
+ // query parameter. In this scenario, apiKey query parameter is sent within the property called
+ // 'queryParamsToRemove' so that the custom filters also can utilize the method.
+ if (responseObject.getQueryParamsToRemove().size() > 0) {
+ String constructedPath = constructQueryParamString(responseObject.getRequestPath(),
+ responseObject.getQueryParamMap(), responseObject.getQueryParamsToRemove());
+ HeaderValueOption headerValueOption = HeaderValueOption.newBuilder()
+ .setHeader(HeaderValue.newBuilder().setKey(APIConstants.PATH_HEADER).setValue(constructedPath)
+ .build())
+ .build();
+ okResponseBuilder.addHeaders(headerValueOption);
+ }
+
if (responseObject.getHeaderMap() != null) {
responseObject.getHeaderMap().forEach((key, value) -> {
HeaderValueOption headerValueOption = HeaderValueOption.newBuilder()
@@ -180,4 +198,34 @@ public class ExtAuthService extends AuthorizationGrpc.AuthorizationImplBase {
}
return Code.INTERNAL_VALUE;
}
+
+ private String constructQueryParamString(String requestPath, Map<String, String> queryParamMap,
+ List<String> queryParamsToRemove) {
+ // If no query parameters needs to be removed, then the request path can be applied as it is.
+ if (queryParamsToRemove.size() == 0) {
+ return requestPath;
+ }
+
+ String pathWithoutQueryParams = requestPath.split("\\\\?")[0];
+ StringBuilder requestPathBuilder = new StringBuilder(pathWithoutQueryParams);
+ int count = 0;
+ if (queryParamMap.size() > 0) {
+ for (String queryParam : queryParamMap.keySet()) {
+ if (queryParamsToRemove.contains(queryParam)) {
+ continue;
+ }
+ if (count == 0) {
+ requestPathBuilder.append("?");
+ } else {
+ requestPathBuilder.append("&");
+ }
+ requestPathBuilder.append(queryParam);
+ if (queryParamMap.get(queryParam) != null) {
+ requestPathBuilder.append("=").append(queryParamMap.get(queryParam));
+ }
+ count++;
+ }
+ }
+ return requestPathBuilder.toString();
+ }
}
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
index dde3ff0bf..681d0602e 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
@@ -96,8 +96,6 @@ public class InternalAPIKeyAuthenticator extends APIKeyHandler {
}
// Extract internal from the request while removing it from the msg context.
String internalKey = extractInternalKey(requestContext);
- // Remove internal key from outbound request
- requestContext.getRemoveHeaders().add(securityParam);
String[] splitToken = internalKey.split("\\\\.");
SignedJWT signedJWT = SignedJWT.parse(internalKey);
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
index 19f1a7abf..62f359b33 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java
@@ -394,6 +394,15 @@ public class ThrottleFilter implements Filter {
if (config.isHeaderConditionsEnabled()) {
Map<String, String> headers = requestContext.getHeaders();
for (String name : headers.keySet()) {
+ // To avoid publishing user token to the traffic manager.
+ if (requestContext.getProtectedHeaders().contains(name)) {
+ continue;
+ }
+ // Sending path header is stopped as it could contain query parameters which are used
+ // to secure APIs.
+ if (name.equals(APIConstants.PATH_HEADER)) {
+ continue;
+ }
jsonObMap.put(name, headers.get(name));
}
}
@@ -401,6 +410,10 @@ public class ThrottleFilter implements Filter {
if (config.isQueryConditionsEnabled()) {
Map<String, String> params = requestContext.getQueryParameters();
for (String name : params.keySet()) {
+ // To avoid publishing apiKey to the traffic manager.
+ if (requestContext.getQueryParamsToRemove().contains(name)) {
+ continue;
+ }
jsonObMap.put(name, params.get(name));
}
} | ['enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/grpc/ExtAuthService.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/throttle/ThrottleFilter.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/constants/APIConstants.java', 'enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContext.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/ResponseObject.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/api/RestAPI.java'] | {'.java': 7} | 7 | 7 | 0 | 0 | 7 | 4,107,223 | 895,149 | 114,646 | 485 | 8,230 | 1,429 | 168 | 7 | 623 | 89 | 141 | 27 | 0 | 0 | 1970-01-01T00:27:19 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,125 | wso2/product-microgateway/2455/2454 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2454 | https://github.com/wso2/product-microgateway/pull/2455 | https://github.com/wso2/product-microgateway/pull/2455 | 1 | fixes | backend basic auth security not working when API is not not secured | ### Description:
<!-- Describe the issue -->
### Steps to reproduce:
create api.
disable security for an resource.
add production endpoint basic auth security
deploy and invoke the resource.
backend call does not have basic auth header
### Affected Product Version:
<!-- Members can use Affected/*** labels -->
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| e840cade2e225ecbca163c832f8c9e0c6dc0a2cb | 2c408d34211682eb6e6a0556ee47dd387e1d3ada | https://github.com/wso2/product-microgateway/compare/e840cade2e225ecbca163c832f8c9e0c6dc0a2cb...2c408d34211682eb6e6a0556ee47dd387e1d3ada | diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
index a1687eeb8..d83d0e2c3 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java
@@ -37,6 +37,7 @@ import org.wso2.choreo.connect.enforcer.security.jwt.APIKeyAuthenticator;
import org.wso2.choreo.connect.enforcer.security.jwt.InternalAPIKeyAuthenticator;
import org.wso2.choreo.connect.enforcer.security.jwt.JWTAuthenticator;
import org.wso2.choreo.connect.enforcer.security.jwt.UnsecuredAPIAuthenticator;
+import org.wso2.choreo.connect.enforcer.util.EndpointSecurityUtils;
import org.wso2.choreo.connect.enforcer.util.FilterUtils;
import java.util.ArrayList;
@@ -153,6 +154,8 @@ public class AuthFilter implements Filter {
requestContext.setAuthenticationContext(authenticate);
if (authenticate.isAuthenticated()) {
updateClusterHeaderAndCheckEnv(requestContext, authenticate);
+ // set backend security
+ EndpointSecurityUtils.addEndpointSecurity(requestContext);
return new AuthenticationResponse(true, false,
false);
}
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java
index 222fb4294..c53c76234 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java
@@ -43,7 +43,6 @@ import org.wso2.choreo.connect.enforcer.dto.APIKeyValidationInfoDTO;
import org.wso2.choreo.connect.enforcer.dto.JWTTokenPayloadInfo;
import org.wso2.choreo.connect.enforcer.exception.APISecurityException;
import org.wso2.choreo.connect.enforcer.util.BackendJwtUtils;
-import org.wso2.choreo.connect.enforcer.util.EndpointSecurityUtils;
import org.wso2.choreo.connect.enforcer.util.FilterUtils;
import org.wso2.choreo.connect.enforcer.util.JWTUtils;
@@ -251,9 +250,6 @@ public class APIKeyAuthenticator extends APIKeyHandler {
// Get APIKeyValidationInfoDTO
APIKeyValidationInfoDTO apiKeyValidationInfoDTO = getAPIKeyValidationDTO(requestContext, payload);
- // Sets endpoint security
- EndpointSecurityUtils.addEndpointSecurity(requestContext, apiKeyValidationInfoDTO);
-
// TODO: Add analytics data processing
// Get SignedJWTInfo
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
index 8e6a94053..92469e697 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java
@@ -44,7 +44,6 @@ import org.wso2.choreo.connect.enforcer.tracing.TracingSpan;
import org.wso2.choreo.connect.enforcer.tracing.TracingTracer;
import org.wso2.choreo.connect.enforcer.tracing.Utils;
import org.wso2.choreo.connect.enforcer.util.BackendJwtUtils;
-import org.wso2.choreo.connect.enforcer.util.EndpointSecurityUtils;
import org.wso2.choreo.connect.enforcer.util.FilterUtils;
import java.text.ParseException;
@@ -210,9 +209,6 @@ public class InternalAPIKeyAuthenticator extends APIKeyHandler {
//Get APIKeyValidationInfoDTO for internal key with limited info
APIKeyValidationInfoDTO apiKeyValidationInfoDTO = getAPIKeyValidationDTO(requestContext, payload);
- // Sets endpoint security
- EndpointSecurityUtils.addEndpointSecurity(requestContext, apiKeyValidationInfoDTO);
-
// Generate or get backend JWT
JWTConfigurationDto jwtConfigurationDto = ConfigHolder.getInstance().
getConfig().getJwtConfigurationDto();
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java
index 00439e100..3c31fad10 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java
@@ -54,7 +54,6 @@ import org.wso2.choreo.connect.enforcer.tracing.TracingSpan;
import org.wso2.choreo.connect.enforcer.tracing.TracingTracer;
import org.wso2.choreo.connect.enforcer.tracing.Utils;
import org.wso2.choreo.connect.enforcer.util.BackendJwtUtils;
-import org.wso2.choreo.connect.enforcer.util.EndpointSecurityUtils;
import org.wso2.choreo.connect.enforcer.util.FilterUtils;
import org.wso2.choreo.connect.enforcer.util.JWTUtils;
@@ -237,9 +236,6 @@ public class JWTAuthenticator implements Authenticator {
}
log.debug("JWT authentication successful.");
- // Set endpoint security
- EndpointSecurityUtils.addEndpointSecurity(requestContext, apiKeyValidationInfoDTO);
-
// Generate or get backend JWT
String endUserToken = null;
JWTConfigurationDto backendJwtConfig = ConfigHolder.getInstance().getConfig().
diff --git a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/EndpointSecurityUtils.java b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/EndpointSecurityUtils.java
index 39b56f5b0..d2d5e05eb 100644
--- a/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/EndpointSecurityUtils.java
+++ b/enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/EndpointSecurityUtils.java
@@ -21,7 +21,6 @@ package org.wso2.choreo.connect.enforcer.util;
import org.wso2.choreo.connect.enforcer.commons.model.RequestContext;
import org.wso2.choreo.connect.enforcer.commons.model.SecurityInfo;
import org.wso2.choreo.connect.enforcer.constants.APIConstants;
-import org.wso2.choreo.connect.enforcer.dto.APIKeyValidationInfoDTO;
import java.util.Base64;
@@ -34,30 +33,31 @@ public class EndpointSecurityUtils {
* Adds the backend endpoint security header to the given requestContext.
*
* @param requestContext requestContext instance to add the backend endpoint security header
- * @param apiKeyValidationInfoDTO apiKeyValidationInfoDTO containing necessary info
*/
- public static void addEndpointSecurity(RequestContext requestContext,
- APIKeyValidationInfoDTO apiKeyValidationInfoDTO) {
- SecurityInfo securityInfo;
- if (apiKeyValidationInfoDTO.getType() != null &&
- requestContext.getMatchedAPI().getEndpointSecurity() != null) {
- if (apiKeyValidationInfoDTO.getType().equals(APIConstants.API_KEY_TYPE_PRODUCTION)) {
+ public static void addEndpointSecurity(RequestContext requestContext) {
+ SecurityInfo securityInfo = null;
+ String keyType = "";
+ if (requestContext.getAuthenticationContext() != null) {
+ keyType = requestContext.getAuthenticationContext().getKeyType();
+ }
+ if (requestContext.getMatchedAPI().getEndpointSecurity() != null) {
+ if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(keyType)) {
securityInfo = requestContext.getMatchedAPI().getEndpointSecurity().
getProductionSecurityInfo();
} else {
securityInfo = requestContext.getMatchedAPI().getEndpointSecurity().
getSandBoxSecurityInfo();
}
- if (securityInfo != null && securityInfo.isEnabled() &&
- APIConstants.AUTHORIZATION_HEADER_BASIC.
- equalsIgnoreCase(securityInfo.getSecurityType())) {
- requestContext.getRemoveHeaders().remove(APIConstants.AUTHORIZATION_HEADER_DEFAULT
- .toLowerCase());
- requestContext.addOrModifyHeaders(APIConstants.AUTHORIZATION_HEADER_DEFAULT,
- APIConstants.AUTHORIZATION_HEADER_BASIC + ' ' +
- Base64.getEncoder().encodeToString((securityInfo.getUsername() +
- ':' + String.valueOf(securityInfo.getPassword())).getBytes()));
- }
+ }
+ if (securityInfo != null && securityInfo.isEnabled() &&
+ APIConstants.AUTHORIZATION_HEADER_BASIC.
+ equalsIgnoreCase(securityInfo.getSecurityType())) {
+ requestContext.getRemoveHeaders().remove(APIConstants.AUTHORIZATION_HEADER_DEFAULT
+ .toLowerCase());
+ requestContext.addOrModifyHeaders(APIConstants.AUTHORIZATION_HEADER_DEFAULT,
+ APIConstants.AUTHORIZATION_HEADER_BASIC + ' ' +
+ Base64.getEncoder().encodeToString((securityInfo.getUsername() +
+ ':' + String.valueOf(securityInfo.getPassword())).getBytes()));
}
}
}
diff --git a/integration/mock-backend-server/src/main/java/org/wso2/choreo/connect/mockbackend/MockBackendProd.java b/integration/mock-backend-server/src/main/java/org/wso2/choreo/connect/mockbackend/MockBackendProd.java
index 3851bdaf4..4ddf8c491 100644
--- a/integration/mock-backend-server/src/main/java/org/wso2/choreo/connect/mockbackend/MockBackendProd.java
+++ b/integration/mock-backend-server/src/main/java/org/wso2/choreo/connect/mockbackend/MockBackendProd.java
@@ -291,6 +291,7 @@ public class MockBackendProd extends Thread {
// response flow headers to interceptor service
// sent request body in response body
httpServer.createContext(context + "/echo", Utils::echo);
+ httpServer.createContext(context + "/echo2", Utils::echo);
httpServer.start();
} catch (Exception ex) {
diff --git a/integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/withapim/BackendSecurityTestCase.java b/integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/withapim/BackendSecurityTestCase.java
index ef1d33e75..f25d38fc7 100644
--- a/integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/withapim/BackendSecurityTestCase.java
+++ b/integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/withapim/BackendSecurityTestCase.java
@@ -81,8 +81,15 @@ public class BackendSecurityTestCase extends ApimBaseTest {
apiOperation.setTarget("/echo");
apiOperation.setThrottlingPolicy(TestConstant.API_TIER.UNLIMITED);
+ APIOperationsDTO apiOperation2 = new APIOperationsDTO();
+ apiOperation2.setVerb("GET");
+ apiOperation2.setTarget("/echo2");
+ apiOperation2.setThrottlingPolicy(TestConstant.API_TIER.UNLIMITED);
+ apiOperation2.setAuthType("None");
+
List<APIOperationsDTO> operationsDTOS = new ArrayList<>();
operationsDTOS.add(apiOperation);
+ operationsDTOS.add(apiOperation2);
APIRequest apiRequest = PublisherUtils.createSampleAPIRequest(API_NAME, API_CONTEXT,
API_VERSION, user.getUserName());
@@ -102,17 +109,29 @@ public class BackendSecurityTestCase extends ApimBaseTest {
Utils.delay(TestConstant.DEPLOYMENT_WAIT_TIME * 2, "Interrupted when waiting for the " +
"subscription to be deployed");
+ //test 1 - jwt secured resource
//Invoke API
Map<String, String> headers = new HashMap<>();
headers.put(HttpHeaders.AUTHORIZATION, "Bearer " + accessToken);
String endpoint = Utils.getServiceURLHttps(API_CONTEXT + "/1.0.0/echo");
HttpResponse response = HttpsClientRequest.doGet(endpoint, headers);
- Assert.assertNotNull(response, "Error occurred while invoking the endpoint " + endpoint + " HttpResponse ");
+ Assert.assertNotNull(response, "Error occurred while invoking the endpoint " + endpoint);
// test headers
Map<String, String> respHeaders = response.getHeaders();
Assert.assertTrue(respHeaders.containsKey("authorization"), "Backend did not receive auth header");
Assert.assertEquals(respHeaders.get("authorization"), "Basic YWRtaW46YWRtaW4=",
"backend basic auth header is incorrect");
+
+ //test 2 - invoke non secured resource
+ endpoint = Utils.getServiceURLHttps(API_CONTEXT + "/1.0.0/echo2");
+ response = HttpsClientRequest.doGet(endpoint);
+ Assert.assertNotNull(response, "Error occurred while invoking the endpoint " + endpoint);
+
+ // test headers
+ respHeaders = response.getHeaders();
+ Assert.assertTrue(respHeaders.containsKey("authorization"), "Backend did not receive auth header");
+ Assert.assertEquals(respHeaders.get("authorization"), "Basic YWRtaW46YWRtaW4=",
+ "backend basic auth header is incorrect");
}
} | ['enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/APIKeyAuthenticator.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/InternalAPIKeyAuthenticator.java', 'integration/mock-backend-server/src/main/java/org/wso2/choreo/connect/mockbackend/MockBackendProd.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/util/EndpointSecurityUtils.java', 'integration/test-integration/src/test/java/org/wso2/choreo/connect/tests/testcases/withapim/BackendSecurityTestCase.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/AuthFilter.java', 'enforcer-parent/enforcer/src/main/java/org/wso2/choreo/connect/enforcer/security/jwt/JWTAuthenticator.java'] | {'.java': 7} | 7 | 7 | 0 | 0 | 7 | 4,081,695 | 889,815 | 114,093 | 484 | 3,418 | 568 | 52 | 6 | 648 | 92 | 131 | 29 | 0 | 0 | 1970-01-01T00:27:17 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
1,126 | wso2/product-microgateway/2438/2439 | wso2 | product-microgateway | https://github.com/wso2/product-microgateway/issues/2439 | https://github.com/wso2/product-microgateway/pull/2438 | https://github.com/wso2/product-microgateway/pull/2438 | 1 | fixes | PathParameters does not resolve if the rawPath contains a trailing slash | ### Description:
If there is a path called `/pet/{petId}` and the inbound request has `/pet/2/`. Due to trailing slash, the path parameters are not resolved for the usage of custom filters.
And following error log also getting printed.
```
PathTemplate and RawPath is mismatched.
```
### Steps to reproduce:
Refer to the description.
### Affected Product Version:
Choreo-Connect-1.0.0-alpha
### Environment details (with versions):
- OS:
- Client:
- Env (Docker/K8s):
---
### Optional Fields
#### Related Issues:
<!-- Any related issues from this/other repositories-->
#### Suggested Labels:
<!--Only to be used by non-members-->
#### Suggested Assignees:
<!--Only to be used by non-members-->
| aa0cde140e39ce785d7355d67b1ec979ddf269b6 | 9037df6cec519f8abd3c1ac4653de836948752ae | https://github.com/wso2/product-microgateway/compare/aa0cde140e39ce785d7355d67b1ec979ddf269b6...9037df6cec519f8abd3c1ac4653de836948752ae | diff --git a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java
index 4ae2c4c83..1ba41f305 100644
--- a/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java
+++ b/enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java
@@ -29,14 +29,16 @@ import java.util.regex.Pattern;
class ParameterResolver {
- private static final Pattern PARAMETER_PATTERN = Pattern.compile("(\\\\{[a-zA-Z]+\\\\})");
+ private static final Pattern PARAMETER_PATTERN = Pattern.compile("(\\\\{[a-zA-Z0-9]+[a-z-_A-Z0-9]*\\\\})");
private static final Logger logger = LogManager.getLogger(ParameterResolver.class);
private final List<String> parameterNames = new ArrayList<>();
private final Pattern pattern;
public ParameterResolver(final String parameterTemplate) {
-
- final Matcher matcher = PARAMETER_PATTERN.matcher(parameterTemplate);
+ // This formatting is required since /foo and /foo/ are considered to be equal
+ String formattedPathParamTemplate = parameterTemplate.endsWith("/") ?
+ parameterTemplate.substring(0, parameterTemplate.length() - 1) : parameterTemplate;
+ final Matcher matcher = PARAMETER_PATTERN.matcher(formattedPathParamTemplate);
while (matcher.find()) {
if (matcher.groupCount() == 1) {
@@ -55,7 +57,10 @@ class ParameterResolver {
}
public Map<String, String> parametersByName(final String uriString) throws IllegalArgumentException {
- final Matcher matcher = pattern.matcher(uriString);
+ // This formatting is required since /foo and /foo/ are considered to be equal
+ String formattedURI = uriString.endsWith("/") ?
+ uriString.substring(0, uriString.length() - 1) : uriString;
+ final Matcher matcher = pattern.matcher(formattedURI);
if (!matcher.matches()) {
// Unlikely to occur as this pair is already matched within router.
logger.error("PathTemplate and RawPath is mismatched.");
diff --git a/enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java b/enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java
index 4b6263f0a..6c9e4fe31 100644
--- a/enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java
+++ b/enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java
@@ -38,6 +38,36 @@ public class RequestContextTest {
"statusType", "available");
}
+ @Test
+ public void testPathParametersWithTrailingSlashInTemplate() {
+ testPathParamValues("/v2/pet/12/status/available", "/v2", "/pet/{petId}/status/{statusType}/",
+ "petId", "12");
+ }
+
+ @Test
+ public void testPathParametersWithTrailingSlashInRawPath() {
+ testPathParamValues("/v2/pet/12/status/available/", "/v2", "/pet/{petId}/status/{statusType}",
+ "petId", "12");
+ }
+
+ @Test
+ public void testPathParametersWithHyphen() {
+ testPathParamValues("/v2/pet/12/status/available", "/v2", "/pet/{pet-Id}/status/{statusType}",
+ "pet-Id", "12");
+ }
+
+ @Test
+ public void testPathParametersWithUnderscore() {
+ testPathParamValues("/v2/pet/12/status/available", "/v2", "/pet/{pet_Id}/status/{statusType}",
+ "pet_Id", "12");
+ }
+
+ @Test
+ public void testPathParametersWithNumbers() {
+ testPathParamValues("/v2/pet/12/status/available", "/v2", "/pet/{petId2}/status/{statusType}",
+ "petId2", "12");
+ }
+
@Test
public void testPathParameterGenerationWithWildcard() {
testPathParamValues("/v2/pet/12/2/random/random2", "/v2", "/pet/{petId}/{imageId}/*", | ['enforcer-parent/commons/src/main/java/org/wso2/choreo/connect/enforcer/commons/model/ParameterResolver.java', 'enforcer-parent/commons/src/test/java/org/wso2/choreo/connect/enforcer/commons/model/RequestContextTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 4,079,219 | 888,852 | 114,000 | 480 | 984 | 202 | 13 | 1 | 730 | 102 | 166 | 30 | 0 | 1 | 1970-01-01T00:27:17 | 247 | Java | {'Java': 2861210, 'Go': 979303, 'Lua': 49518, 'Shell': 37030, 'C++': 27281, 'Dockerfile': 16279, 'HTML': 5630, 'Starlark': 2894, 'Jinja': 1646, 'Open Policy Agent': 539} | Apache License 2.0 |
726 | dbs-leipzig/gradoop/803/802 | dbs-leipzig | gradoop | https://github.com/dbs-leipzig/gradoop/issues/802 | https://github.com/dbs-leipzig/gradoop/pull/803 | https://github.com/dbs-leipzig/gradoop/pull/803 | 1 | fixes | PropertyValue returns wrong types | The function PropertyValue.getType() returns the wrong objects for the following types:
- `TYPE_LIST`
- `TYPE_DATE`
- `TYPE_TIME`
- `TYPE_DATETIME`
Fix bug and add unit tests for this function. | 6fe007fe705845b1f9ce0857c1149932c06e20d6 | 632f79428658c49206d1295c7cf4760266a7ab1b | https://github.com/dbs-leipzig/gradoop/compare/6fe007fe705845b1f9ce0857c1149932c06e20d6...632f79428658c49206d1295c7cf4760266a7ab1b | diff --git a/gradoop-common/src/main/java/org/gradoop/common/model/impl/properties/PropertyValue.java b/gradoop-common/src/main/java/org/gradoop/common/model/impl/properties/PropertyValue.java
index 8e0d06c20d4..6ed36757fd1 100644
--- a/gradoop-common/src/main/java/org/gradoop/common/model/impl/properties/PropertyValue.java
+++ b/gradoop-common/src/main/java/org/gradoop/common/model/impl/properties/PropertyValue.java
@@ -41,6 +41,7 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Collections;
/**
* Represents a single property value in the EPGM.
@@ -137,6 +138,11 @@ public class PropertyValue implements Value, Serializable, Comparable<PropertyVa
*/
private static final long serialVersionUID = 1L;
+ /**
+ * Mapping from byte value to associated Class
+ */
+ private static final Map<Byte, Class> TYPE_MAPPING = getTypeMap();
+
/**
* Stores the type and the value
*/
@@ -723,21 +729,36 @@ public class PropertyValue implements Value, Serializable, Comparable<PropertyVa
// Util
//----------------------------------------------------------------------------
+ /**
+ * Get the data type as class object according to the first position of the rawBytes[] array
+ *
+ * @return Class object
+ */
public Class<?> getType() {
- return rawBytes[0] == TYPE_BOOLEAN ?
- Boolean.class : rawBytes[0] == TYPE_INTEGER ?
- Integer.class : rawBytes[0] == TYPE_LONG ?
- Long.class : rawBytes[0] == TYPE_FLOAT ?
- Float.class : rawBytes[0] == TYPE_DOUBLE ?
- Double.class : rawBytes[0] == TYPE_STRING ?
- String.class : rawBytes[0] == TYPE_BIG_DECIMAL ?
- BigDecimal.class : rawBytes[0] == TYPE_GRADOOP_ID ?
- GradoopId.class : rawBytes[0] == TYPE_MAP ?
- Map.class : rawBytes[0] == TYPE_LIST ?
- LocalDate.class : rawBytes[0] == TYPE_DATE ?
- LocalTime.class : rawBytes[0] == TYPE_TIME ?
- LocalDateTime.class : rawBytes[0] == TYPE_DATETIME ?
- List.class : null;
+ return TYPE_MAPPING.get(rawBytes[0]);
+ }
+
+ /**
+ * Creates a type mapping HashMap to assign a byte value to its represented Class
+ *
+ * @return a Map with byte to class assignments
+ */
+ private static Map<Byte, Class> getTypeMap() {
+ Map<Byte, Class> map = new HashMap<>();
+ map.put(TYPE_BOOLEAN, Boolean.class);
+ map.put(TYPE_INTEGER, Integer.class);
+ map.put(TYPE_LONG, Long.class);
+ map.put(TYPE_FLOAT, Float.class);
+ map.put(TYPE_DOUBLE, Double.class);
+ map.put(TYPE_STRING, String.class);
+ map.put(TYPE_BIG_DECIMAL, BigDecimal.class);
+ map.put(TYPE_GRADOOP_ID, GradoopId.class);
+ map.put(TYPE_MAP, Map.class);
+ map.put(TYPE_LIST, List.class);
+ map.put(TYPE_DATE, LocalDate.class);
+ map.put(TYPE_TIME, LocalTime.class);
+ map.put(TYPE_DATETIME, LocalDateTime.class);
+ return Collections.unmodifiableMap(map);
}
public int getByteSize() {
diff --git a/gradoop-common/src/test/java/org/gradoop/common/model/impl/properties/PropertyValueTest.java b/gradoop-common/src/test/java/org/gradoop/common/model/impl/properties/PropertyValueTest.java
index 988b8da4303..f3b81a48623 100644
--- a/gradoop-common/src/test/java/org/gradoop/common/model/impl/properties/PropertyValueTest.java
+++ b/gradoop-common/src/test/java/org/gradoop/common/model/impl/properties/PropertyValueTest.java
@@ -898,6 +898,52 @@ public class PropertyValueTest {
p = create(DATETIME_VAL_d);
assertEquals(p, writeAndReadFields(PropertyValue.class, p));
}
+
+ @Test
+ public void testGetType() {
+ PropertyValue p = create(NULL_VAL_0);
+ assertNull(p.getType());
+
+ p = create(BOOL_VAL_1);
+ assertEquals(Boolean.class, p.getType());
+
+ p = create(INT_VAL_2);
+ assertEquals(Integer.class, p.getType());
+
+ p = create(LONG_VAL_3);
+ assertEquals(Long.class, p.getType());
+
+ p = create(FLOAT_VAL_4);
+ assertEquals(Float.class, p.getType());
+
+ p = create(DOUBLE_VAL_5);
+ assertEquals(Double.class, p.getType());
+
+ p = create(STRING_VAL_6);
+ assertEquals(String.class, p.getType());
+
+ p = create(BIG_DECIMAL_VAL_7);
+ assertEquals(BigDecimal.class, p.getType());
+
+ p = create(GRADOOP_ID_VAL_8);
+ assertEquals(GradoopId.class, p.getType());
+
+ p = create(MAP_VAL_9);
+ assertEquals(Map.class, p.getType());
+
+ p = create(LIST_VAL_a);
+ assertEquals(List.class, p.getType());
+
+ p = create(DATE_VAL_b);
+ assertEquals(LocalDate.class, p.getType());
+
+ p = create(TIME_VAL_c);
+ assertEquals(LocalTime.class, p.getType());
+
+ p = create(DATETIME_VAL_d);
+ assertEquals(LocalDateTime.class, p.getType());
+ }
+
/**
* Assumes that p1 == p2 < p3
*/ | ['gradoop-common/src/test/java/org/gradoop/common/model/impl/properties/PropertyValueTest.java', 'gradoop-common/src/main/java/org/gradoop/common/model/impl/properties/PropertyValue.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,229,627 | 724,737 | 97,777 | 1,255 | 2,011 | 483 | 49 | 1 | 202 | 28 | 46 | 8 | 0 | 0 | 1970-01-01T00:25:28 | 239 | Java | {'Java': 6591650, 'Shell': 2289} | Apache License 2.0 |
727 | dbs-leipzig/gradoop/787/786 | dbs-leipzig | gradoop | https://github.com/dbs-leipzig/gradoop/issues/786 | https://github.com/dbs-leipzig/gradoop/pull/787 | https://github.com/dbs-leipzig/gradoop/pull/787 | 1 | fixes | Overlap operator does not consider the edges of the second graph | Graph 1 : `(A)-[message]->(B)-[message]->(C)`
Graph 2: `(A)-[like]->(B)-[message]->(C)`
Gaph1.overlap(Gaph2) returns the graph:
`(A)-[message]->(B)-[message]->(C)`
which is not the graph I expected.
In my opinion the proper result should be a overlap of vertices AND edges:
`(A) (B)-[message]->(C)`
The edge `(A)-[message]->(B)` does not occur in both graphs, but only in the first one.
The following code of the function Overlap.execute() is one solution approach. I join the edge-set of the second graph on the edge-set of the first graph:
```
public class Overlap [...] {
public LogicalGraph execute(LogicalGraph firstGraph, LogicalGraph secondGraph) {
[...]
DataSet<Edge> newEdges = firstGraph.getEdges()
.join(secondGraph.getEdges())
.where(new Id<>())
.equalTo(new Id<>())
.with(new LeftSide<>());
[...]
}
}
```
Related: #687
Also a UnitTest for this case should be added. | 0b27a64617966f478a6b0557e1b131aeb98238ce | a527afac168b0ed1fd0f1c87ba1154a7eb0322b0 | https://github.com/dbs-leipzig/gradoop/compare/0b27a64617966f478a6b0557e1b131aeb98238ce...a527afac168b0ed1fd0f1c87ba1154a7eb0322b0 | diff --git a/gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/overlap/Overlap.java b/gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/overlap/Overlap.java
index 83d1126b6a1..db1ba1594e3 100644
--- a/gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/overlap/Overlap.java
+++ b/gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/overlap/Overlap.java
@@ -21,8 +21,6 @@ import org.gradoop.common.model.impl.pojo.Vertex;
import org.gradoop.flink.model.api.epgm.LogicalGraph;
import org.gradoop.flink.model.api.operators.BinaryGraphToGraphOperator;
import org.gradoop.flink.model.impl.functions.epgm.Id;
-import org.gradoop.flink.model.impl.functions.epgm.SourceId;
-import org.gradoop.flink.model.impl.functions.epgm.TargetId;
import org.gradoop.flink.model.impl.functions.utils.LeftSide;
/**
@@ -50,12 +48,8 @@ public class Overlap implements BinaryGraphToGraphOperator {
.with(new LeftSide<>());
DataSet<Edge> newEdges = firstGraph.getEdges()
- .join(newVertices)
- .where(new SourceId<>())
- .equalTo(new Id<>())
- .with(new LeftSide<>())
- .join(newVertices)
- .where(new TargetId<>())
+ .join(secondGraph.getEdges())
+ .where(new Id<>())
.equalTo(new Id<>())
.with(new LeftSide<>());
diff --git a/gradoop-flink/src/test/java/org/gradoop/flink/model/impl/operators/overlap/OverlapTest.java b/gradoop-flink/src/test/java/org/gradoop/flink/model/impl/operators/overlap/OverlapTest.java
index 61d9385248c..da1fd1575b5 100644
--- a/gradoop-flink/src/test/java/org/gradoop/flink/model/impl/operators/overlap/OverlapTest.java
+++ b/gradoop-flink/src/test/java/org/gradoop/flink/model/impl/operators/overlap/OverlapTest.java
@@ -105,6 +105,19 @@ public class OverlapTest extends ReducibleBinaryOperatorsTestBase {
collectAndAssertTrue(derivedGraph1.overlap(derivedGraph2).equalsByElementIds(expected));
}
+ @Test
+ public void testVertexOnlyOverlappingGraphs() throws Exception {
+ FlinkAsciiGraphLoader loader = getLoaderFromString(
+ "g1[(a)-[e1]->(b)]" +
+ "g2[(a)-[e2]->(b)]" +
+ "expected[(a)(b)]");
+ LogicalGraph g1 = loader.getLogicalGraphByVariable("g1");
+ LogicalGraph g2 = loader.getLogicalGraphByVariable("g2");
+ LogicalGraph expected = loader.getLogicalGraphByVariable("expected");
+
+ collectAndAssertTrue(g1.overlap(g2).equalsByElementIds(expected));
+ }
+
@Test
public void testGraphContainment() throws Exception {
FlinkAsciiGraphLoader loader = getSocialNetworkLoader();
@@ -156,8 +169,10 @@ public class OverlapTest extends ReducibleBinaryOperatorsTestBase {
@Test
public void testReduceCollection() throws Exception {
FlinkAsciiGraphLoader loader = getLoaderFromString("" +
- "g1[(a)-[e1]->(b)];g2[(b)-[e2]->(c)]" +
- "g3[(c)-[e3]->(d)];g4[(a)-[e1]->(b)]" +
+ "g1[(a)-[e1]->(b)]" +
+ "g2[(b)-[e2]->(c)]" +
+ "g3[(c)-[e3]->(d)]" +
+ "g4[(a)-[e1]->(b)]" +
"exp12[(b)]" +
"exp13[]" +
"exp14[(a)-[e1]->(b)]" | ['gradoop-flink/src/test/java/org/gradoop/flink/model/impl/operators/overlap/OverlapTest.java', 'gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/overlap/Overlap.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,195,494 | 717,476 | 96,934 | 1,242 | 361 | 84 | 10 | 1 | 992 | 116 | 259 | 31 | 0 | 1 | 1970-01-01T00:25:26 | 239 | Java | {'Java': 6591650, 'Shell': 2289} | Apache License 2.0 |
1,017 | cloudslang/cloud-slang/370/199 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/199 | https://github.com/CloudSlang/cloud-slang/pull/370 | https://github.com/CloudSlang/cloud-slang/pull/370#issuecomment-121254894 | 1 | fixes | Uninformative error message in CLI when missing spaces before 'required' keyword | When there are missing spaces before input's 'required' keyword, for example:
flow:
inputs:
- input:
required: true
and trying to run the flow in the CLI, you get an uninformative error message:
Command failed java.lang.RuntimeException: java.lang.RuntimeException: Error transforming source: flow_with_required_input.sl to a Slang model. null
| 847c906f25e97c48a9058c653eca079659fe3e42 | d232e7828e7a803967f05215517891c75e2f0797 | https://github.com/cloudslang/cloud-slang/compare/847c906f25e97c48a9058c653eca079659fe3e42...d232e7828e7a803967f05215517891c75e2f0797 | diff --git a/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java b/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java
index 5f92c01c4..1da4637a7 100644
--- a/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java
+++ b/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java
@@ -34,7 +34,7 @@ public abstract class AbstractInputsTransformer {
Map.Entry<String, ?> entry = ((Map<String, ?>) rawInput).entrySet().iterator().next();
Object entryValue = entry.getValue();
if(entryValue == null){
- throw new RuntimeException("Could not transform Input : " + rawInput + " Since it has a null value");
+ throw new RuntimeException("Could not transform Input : " + rawInput + " Since it has a null value.\\n\\nMake sure a value is specified or that indentation is properly done.");
}
if (entryValue instanceof Map) {
// - some_inputs: | ['cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 335,035 | 64,961 | 8,679 | 104 | 310 | 61 | 2 | 1 | 358 | 45 | 78 | 9 | 0 | 0 | 1970-01-01T00:23:56 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,020 | cloudslang/cloud-slang/869/868 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/868 | https://github.com/CloudSlang/cloud-slang/pull/869 | https://github.com/CloudSlang/cloud-slang/pull/869 | 1 | fixes | False validation error | if an input has the tag private: true, it should not be accessible to the user at runtime:
> 11:36:02 [ERROR] Failed to extract metadata for file: 'c:\\Users\\moldovas\\cloudslang\\cloud-slang-content\\content\\io\\cloudslang\\base\\datetime\\get_time.sl'.
> Error for executable io.cloudslang.base.datetime.get_time: Input 'localeLang' is missing description.
>
> ```
> - localeLang:
> default: ${get("locale_lang", "en")}
> private: true
> ```
| 70d43050c2f180b18170be7135e85e939f8405ce | 5b1aadbf21f9837d2cd7edaf0fa445af6149d41e | https://github.com/cloudslang/cloud-slang/compare/70d43050c2f180b18170be7135e85e939f8405ce...5b1aadbf21f9837d2cd7edaf0fa445af6149d41e | diff --git a/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/validation/StaticValidatorImpl.java b/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/validation/StaticValidatorImpl.java
index ed6f7ca2f..55bb76bc6 100644
--- a/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/validation/StaticValidatorImpl.java
+++ b/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/validation/StaticValidatorImpl.java
@@ -13,6 +13,7 @@ import io.cloudslang.lang.compiler.Extension;
import io.cloudslang.lang.compiler.modeller.model.Executable;
import io.cloudslang.lang.compiler.modeller.model.Metadata;
import io.cloudslang.lang.entities.bindings.InOutParam;
+import io.cloudslang.lang.entities.bindings.Input;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang.Validate;
import org.springframework.stereotype.Component;
@@ -55,7 +56,8 @@ public class StaticValidatorImpl implements StaticValidator {
for (InOutParam inOutParam : ListUtils.emptyIfNull(inOutParams)) {
if (metadataInOutParams == null) {
throw new MetadataMissingException(errorMessagePrefix + "s are missing description entirely.");
- } else if (metadataInOutParams.get(inOutParam.getName()) == null) {
+ } else if (metadataInOutParams.get(inOutParam.getName()) == null &&
+ (!(inOutParam instanceof Input) || !((Input) inOutParam).isPrivateInput())) {
throw new MetadataMissingException(errorMessagePrefix + " '" + inOutParam.getName() + "' is missing description.");
}
}
diff --git a/cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/validation/StaticValidatorTest.java b/cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/validation/StaticValidatorTest.java
index 55c8f0caf..75608c5d1 100644
--- a/cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/validation/StaticValidatorTest.java
+++ b/cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/validation/StaticValidatorTest.java
@@ -54,7 +54,8 @@ public class StaticValidatorTest {
List<Input> inputList = Lists.newArrayList(new Input.InputBuilder("input1", "value1").build(),
new Input.InputBuilder("input2", "value2").build(),
new Input.InputBuilder("input3", "value3").build());
- Flow newExecutable = new Flow(null, null, null, "no_dependencies", "empty_flow", inputList, null, null, new HashSet<String>(), SYSTEM_PROPERTY_DEPENDENCIES);
+ Flow newExecutable = new Flow(null, null, null, "no_dependencies", "empty_flow", inputList, null, null,
+ new HashSet<String>(), SYSTEM_PROPERTY_DEPENDENCIES);
Metadata metadata = new Metadata();
Map<String, String> inputMap = new HashMap<>();
inputMap.put("input1", "description1");
@@ -63,7 +64,25 @@ public class StaticValidatorTest {
exception.expect(RuntimeException.class);
exception.expectMessage("Error for executable no_dependencies.empty_flow: Input 'input3' is missing description.");
- staticValidator.validateSlangFile(new File(getClass().getResource("/no_dependencies/empty_flow.sl").toURI()), newExecutable, metadata, true);
+ staticValidator.validateSlangFile(new File(getClass().getResource("/no_dependencies/empty_flow.sl").toURI()),
+ newExecutable, metadata, true);
+ }
+
+ @Test
+ public void missingDescriptionForPrivateInputInput() throws URISyntaxException {
+ List<Input> inputList = Lists.newArrayList(new Input.InputBuilder("input1", "value1").build(),
+ new Input.InputBuilder("input2", "value2").build(),
+ new Input.InputBuilder("input3", "value3").withPrivateInput(true).build());
+ Flow newExecutable = new Flow(null, null, null, "no_dependencies", "empty_flow", inputList, null, null,
+ new HashSet<String>(), SYSTEM_PROPERTY_DEPENDENCIES);
+ Metadata metadata = new Metadata();
+ Map<String, String> inputMap = new HashMap<>();
+ inputMap.put("input1", "description1");
+ inputMap.put("input2", "description2");
+ metadata.setInputs(inputMap);
+
+ staticValidator.validateSlangFile(new File(getClass().getResource("/no_dependencies/empty_flow.sl").toURI()),
+ newExecutable, metadata, true);
}
@Test | ['cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/validation/StaticValidatorTest.java', 'cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/validation/StaticValidatorImpl.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 676,729 | 129,982 | 17,082 | 196 | 312 | 69 | 4 | 1 | 446 | 53 | 124 | 11 | 0 | 1 | 1970-01-01T00:24:34 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,019 | cloudslang/cloud-slang/957/956 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/956 | https://github.com/CloudSlang/cloud-slang/pull/957 | https://github.com/CloudSlang/cloud-slang/pull/957 | 1 | fixes | CLI compile folders | `compile dir_with_no_sl_files` --> NPE
compile description says `--f ...` <-- should be updated to `--d` | 542fca98f64398385726a81f29ce55419d206129 | d611268b66a76eb27258717238591ed7529f497a | https://github.com/cloudslang/cloud-slang/compare/542fca98f64398385726a81f29ce55419d206129...d611268b66a76eb27258717238591ed7529f497a | diff --git a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCli.java b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCli.java
index f1eba947f..fcd1360f7 100644
--- a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCli.java
+++ b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCli.java
@@ -23,17 +23,6 @@ import io.cloudslang.lang.runtime.events.LanguageEventData;
import io.cloudslang.score.events.EventConstants;
import io.cloudslang.score.events.ScoreEvent;
import io.cloudslang.score.events.ScoreEventListener;
-
-import java.io.File;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang.time.StopWatch;
@@ -46,6 +35,15 @@ import org.springframework.shell.core.annotation.CliOption;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
+import java.io.File;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
/**
* @author lesant
@@ -150,9 +148,11 @@ public class SlangCli implements CommandMarker {
@CliCommand(value = "compile", help = "Display compile errors for an executable")
public String compileSource(
- @CliOption(key = {"", "d", "directory"}, mandatory = false, help = FILE_HELP)
+ @CliOption(key = {"", "d", "directory"}, mandatory = false,
+ help = "Path to directory. e.g. compile --d c:/.../your_directory")
final List<String> directories,
- @CliOption(key = {"", "f", "file"}, mandatory = false, help = FILE_HELP) final File file,
+ @CliOption(key = {"", "f", "file"}, mandatory = false,
+ help = "Path to filename. e.g. compile --f c:/.../your_flow.sl") final File file,
@CliOption(key = {"cp", "classpath"}, mandatory = false, help = CLASSPATH_HELP)
final List<String> classPath
) {
@@ -169,17 +169,21 @@ public class SlangCli implements CommandMarker {
}
private String printAllCompileErrors(List<CompilationModellingResult> results) {
- StringBuilder stringBuilder = new StringBuilder();
- for (CompilationModellingResult result : results) {
- printCompileErrors(result.getErrors(), result.getFile(), stringBuilder);
- stringBuilder.append(System.lineSeparator());
+ if (results.size() > 0) {
+ StringBuilder stringBuilder = new StringBuilder();
+ for (CompilationModellingResult result : results) {
+ printCompileErrors(result.getErrors(), result.getFile(), stringBuilder);
+ stringBuilder.append(System.lineSeparator());
+ }
+ return stringBuilder.toString();
+ } else {
+ return "No files were found to compile.";
}
- return stringBuilder.toString();
}
private String printCompileErrors(List<RuntimeException> exceptions, File file, StringBuilder stringBuilder) {
if (exceptions.size() > 0) {
- stringBuilder.append("Following exceptions were found:" + System.lineSeparator());
+ stringBuilder.append("Following exceptions were found:").append(System.lineSeparator());
for (RuntimeException exception : exceptions) {
stringBuilder.append("\\t");
stringBuilder.append(exception.getClass());
@@ -189,7 +193,7 @@ public class SlangCli implements CommandMarker {
}
throw new RuntimeException(stringBuilder.toString());
} else {
- stringBuilder.append("Compilation was successful for " + file.getName());
+ stringBuilder.append("Compilation was successful for ").append(file.getName());
}
return StringUtils.trim(stringBuilder.toString());
}
@@ -215,7 +219,7 @@ public class SlangCli implements CommandMarker {
if (CollectionUtils.isEmpty(systemProperties)) {
stringBuilder.append("No system properties found.");
} else {
- stringBuilder.append("Following system properties were loaded:" + System.lineSeparator());
+ stringBuilder.append("Following system properties were loaded:").append(System.lineSeparator());
for (SystemProperty systemProperty : systemProperties) {
stringBuilder.append("\\t");
stringBuilder.append(systemProperty.getFullyQualifiedName());
diff --git a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/ConsolePrinterImpl.java b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/ConsolePrinterImpl.java
index b7bd94af8..e2a02703b 100644
--- a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/ConsolePrinterImpl.java
+++ b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/ConsolePrinterImpl.java
@@ -48,7 +48,9 @@ public class ConsolePrinterImpl implements ConsolePrinter, DisposableBean {
@Override
public synchronized void waitForAllPrintTasksToFinish() {
try {
- lastTask.get(1, TimeUnit.MINUTES);
+ if (lastTask != null) {
+ lastTask.get(1, TimeUnit.MINUTES);
+ }
} catch (InterruptedException | ExecutionException | TimeoutException ignore) {
}
}
diff --git a/cloudslang-cli/src/test/java/io/cloudslang/lang/cli/services/ConsolePrinterImplTest.java b/cloudslang-cli/src/test/java/io/cloudslang/lang/cli/services/ConsolePrinterImplTest.java
index f13ee6c0f..a40846279 100644
--- a/cloudslang-cli/src/test/java/io/cloudslang/lang/cli/services/ConsolePrinterImplTest.java
+++ b/cloudslang-cli/src/test/java/io/cloudslang/lang/cli/services/ConsolePrinterImplTest.java
@@ -42,6 +42,7 @@ import static org.mockito.Mockito.when;
public class ConsolePrinterImplTest {
private static final String SINGLE_THREAD_EXECUTOR = "singleThreadExecutor";
+ public static final String LAST_TASK = "lastTask";
@InjectMocks
@Spy
@@ -96,6 +97,21 @@ public class ConsolePrinterImplTest {
verify(lastTask, times(1)).get(1, TimeUnit.MINUTES);
}
+ @Test
+ public void testNullPointerExceptionNotThrown() throws NoSuchFieldException, IllegalAccessException {
+ ConsolePrinterImpl consolePrinter = new ConsolePrinterImpl();
+ consolePrinter.initialize();
+
+ Class<? extends ConsolePrinterImpl> consolePrinterClass = consolePrinter.getClass();
+ Field consolePrinterClassDeclaredField = consolePrinterClass.getDeclaredField(LAST_TASK);
+
+ consolePrinterClassDeclaredField.setAccessible(true);
+ Object lastTask = consolePrinterClassDeclaredField.get(consolePrinter);
+ assertNull(lastTask);
+
+ consolePrinter.waitForAllPrintTasksToFinish();
+ }
+
@Test
public void testConsolePrint() throws Exception {
final List<Runnable> runnableList = new ArrayList<>(); | ['cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/ConsolePrinterImpl.java', 'cloudslang-cli/src/test/java/io/cloudslang/lang/cli/services/ConsolePrinterImplTest.java', 'cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCli.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 848,811 | 157,855 | 20,929 | 223 | 2,494 | 458 | 50 | 2 | 107 | 15 | 29 | 3 | 0 | 0 | 1970-01-01T00:24:39 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,026 | cloudslang/cloud-slang/479/331 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/331 | https://github.com/CloudSlang/cloud-slang/pull/479 | https://github.com/CloudSlang/cloud-slang/pull/479 | 1 | fixes | CLI log: Missing CALL_ARGUMENTS for Python action [1] | Observed the following in CLI log:
Java action - call arguments logged:
```
STEP_TYPE=ACTION, STEP_NAME=null, TYPE=EVENT_ACTION_START, CALL_ARGUMENTS={characterSet=UTF-8, agentForwarding=null, command=docker ps -a, timeout=600000, password=null, pty=false, privateKeyFile=c:/.../id_rsa, port=22, host=111.111.111.111, arguments=null, closeSession=false, username=core}
```
Python action - call arguments not logged:
```
STEP_TYPE=ACTION, STEP_NAME=null, TYPE=EVENT_ACTION_START, CALL_ARGUMENTS={}
```
however there are inputs declared in the operation.
Shouldn't call arguments appear in the log also for Python actions (EVENT_ACTION_START events)?
| b57a401b31e480701af163aec446c0dfb0c6ede2 | c1ec95f59f4c904eed6bd03915a553d1bfbac616 | https://github.com/cloudslang/cloud-slang/compare/b57a401b31e480701af163aec446c0dfb0c6ede2...c1ec95f59f4c904eed6bd03915a553d1bfbac616 | diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
index 41527a5ae..a35b5371a 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
@@ -152,4 +152,9 @@ public class LanguageEventData extends HashMap<String, Serializable> {
public void setAsyncLoopBoundExpression(List<Serializable> asyncLoopBoundExpression) {
put(BOUND_ASYNC_LOOP_EXPRESSION, (Serializable) asyncLoopBoundExpression);
}
+
+ public Map<String, Serializable> getCallArguments() {return (Map<String, Serializable>) get(CALL_ARGUMENTS);}
+
+ public void setCallArguments(Map<String, Serializable> callArguments) {put(CALL_ARGUMENTS, (Serializable) callArguments);
+ }
}
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
index 26d305fa5..18b57da47 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
@@ -17,11 +17,12 @@ import io.cloudslang.lang.entities.ScoreLangConstants;
import io.cloudslang.lang.runtime.env.ReturnValues;
import io.cloudslang.lang.runtime.env.RunEnvironment;
import io.cloudslang.lang.runtime.events.LanguageEventData;
+import io.cloudslang.score.api.execution.ExecutionParametersConsts;
+import io.cloudslang.score.lang.ExecutionRuntimeServices;
import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.log4j.Logger;
-import io.cloudslang.score.api.execution.ExecutionParametersConsts;
-import io.cloudslang.score.lang.ExecutionRuntimeServices;
import org.python.core.*;
import org.python.util.PythonInterpreter;
import org.springframework.beans.factory.annotation.Autowired;
@@ -30,11 +31,7 @@ import org.springframework.stereotype.Component;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import static io.cloudslang.score.api.execution.ExecutionParametersConsts.EXECUTION_RUNTIME_SERVICES;
@@ -63,10 +60,16 @@ public class ActionSteps extends AbstractSteps {
Map<String, Serializable> returnValue = new HashMap<>();
Map<String, Serializable> callArguments = runEnv.removeCallArguments();
+ Map<String, Serializable> callArgumentsDeepCopy = new HashMap<>();
+
+ for (Map.Entry<String, Serializable> entry : callArguments.entrySet()) {
+ callArgumentsDeepCopy.put(entry.getKey(), SerializationUtils.clone(entry.getValue()));
+ }
+
Map<String, SerializableSessionObject> serializableSessionData = runEnv.getSerializableDataMap();
fireEvent(executionRuntimeServices, ScoreLangConstants.EVENT_ACTION_START, "Preparing to run action " + actionType,
runEnv.getExecutionPath().getParentPath(), LanguageEventData.StepType.ACTION, null,
- Pair.of(LanguageEventData.CALL_ARGUMENTS, (Serializable) callArguments));
+ Pair.of(LanguageEventData.CALL_ARGUMENTS, (Serializable) callArgumentsDeepCopy));
try {
switch (actionType) {
case JAVA:
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
index 721c7fa60..ec74928df 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
@@ -15,6 +15,7 @@ import com.hp.oo.sdk.content.plugin.SerializableSessionObject;
import io.cloudslang.lang.entities.ScoreLangConstants;
import io.cloudslang.lang.runtime.env.RunEnvironment;
import io.cloudslang.lang.runtime.env.ReturnValues;
+import io.cloudslang.lang.runtime.events.LanguageEventData;
import org.junit.Assert;
import io.cloudslang.score.api.execution.ExecutionParametersConsts;
import io.cloudslang.score.events.ScoreEvent;
@@ -135,6 +136,68 @@ public class ActionStepsTest {
Assert.assertNotNull(actionEndEvent);
}
+ @Test
+ public void doActionPythonActionCheckCallArgumentsOnEvent() {
+ //prepare doAction arguments
+ RunEnvironment runEnv = new RunEnvironment();
+ ExecutionRuntimeServices runtimeServices = new ExecutionRuntimeServices();
+
+ Map<String, Serializable> callArguments = new HashMap<>();
+ callArguments.put("index", 1);
+ runEnv.putCallArguments(callArguments);
+
+ String userPythonScript = "var= \\"hello\\"";
+
+ //invoke doAction
+ actionSteps.doAction(runEnv,new HashMap<String, Object>(), PYTHON, "", "", runtimeServices, userPythonScript, 2L, "operationName");
+
+ Collection<ScoreEvent> events = runtimeServices.getEvents();
+
+ Assert.assertFalse(events.isEmpty());
+ ScoreEvent eventActionStart = null;
+ for(ScoreEvent event:events){
+ if(event.getEventType().equals(ScoreLangConstants.EVENT_ACTION_START)){
+ eventActionStart = event;
+ break;
+ }
+ }
+
+ Assert.assertNotNull(eventActionStart);
+ LanguageEventData data = (LanguageEventData)eventActionStart.getData();
+ Map<String, Serializable> actualCallArguments = data.getCallArguments();
+ Assert.assertEquals("Python action call arguments are not as expected", callArguments, actualCallArguments);
+ }
+
+ @Test
+ public void doActionJavaActionCheckCallArgumentsOnEvent() {
+ //prepare doAction arguments
+ RunEnvironment runEnv = new RunEnvironment();
+ ExecutionRuntimeServices runtimeServices = new ExecutionRuntimeServices();
+
+ Map<String, Serializable> callArguments = new HashMap<>();
+ callArguments.put("index", 1);
+ runEnv.putCallArguments(callArguments);
+
+ //invoke doAction
+ actionSteps.doAction(runEnv, new HashMap<String, Object>(), JAVA, ContentTestActions.class.getName(), "doJavaSampleAction", runtimeServices, null, 2L, "operationName");
+
+ Collection<ScoreEvent> events = runtimeServices.getEvents();
+
+ Assert.assertFalse(events.isEmpty());
+ ScoreEvent eventActionStart = null;
+ for(ScoreEvent event:events){
+ if(event.getEventType().equals(ScoreLangConstants.EVENT_ACTION_START)){
+ eventActionStart = event;
+ break;
+ }
+ }
+
+ Assert.assertNotNull(eventActionStart);
+ LanguageEventData data = (LanguageEventData)eventActionStart.getData();
+ Map<String, Serializable> actualCallArguments = data.getCallArguments();
+ Assert.assertEquals("Java action call arguments are not as expected", callArguments, actualCallArguments);
+ }
+
@Test(expected = RuntimeException.class, timeout = DEFAULT_TIMEOUT)
public void doJavaActionWrongMethodTest() {
//prepare doAction arguments | ['cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java', 'cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 349,946 | 67,594 | 8,981 | 105 | 1,176 | 225 | 24 | 2 | 655 | 65 | 161 | 18 | 0 | 2 | 1970-01-01T00:24:07 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,018 | cloudslang/cloud-slang/1005/1004 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/1004 | https://github.com/CloudSlang/cloud-slang/pull/1005 | https://github.com/CloudSlang/cloud-slang/pull/1005 | 1 | fixes | Builder - file name not printed | https://travis-ci.org/CloudSlang/cloud-slang-content/builds/238929220?utm_source=github_status&utm_medium=notification
```console
02/06/17 22:05:30 [INFO] Test case passed: testRemoteSecureCopySucessLocalToRemote [/home/travis/build/CloudSlang/cloud-slang-content/test/io/cloudslang/base/remote_file_transfer/remote_secure_copy.inputs.yaml]. Finished running: io.cloudslang.base.remote_file_transfer.test_remote_secure_copy_local_to_remote with result: SUCCESS
02/06/17 22:05:30 [ERROR]
02/06/17 22:05:30 [ERROR] ------------------------------------------------------------
02/06/17 22:05:30 [ERROR] Exception: Error at line [13] - There should be an empty line between two sections of different tags (@description and @input)
02/06/17 22:05:30 [ERROR] Exception: Error at line [13] - There should be an empty line between two sections of different tags (@description and @input)
02/06/17 22:05:30 [ERROR] Exception: Error at line [11] - There should be an empty line between two sections of different tags (@description and @input)
02/06/17 22:05:30 [ERROR] Exception: Error at line [12] - There should be an empty line between two sections of different tags (@description and @input)
02/06/17 22:05:30 [ERROR] FAILURE: Validation of slang files for project: "/home/travis/build/CloudSlang/cloud-slang-content" failed.
02/06/17 22:05:30 [ERROR] ---------------
```
which file? | bbb20fab5c5de83d724d3ec3f2c0adf770427078 | bb66ee6910987996d31da7be7bf44747ed6200ba | https://github.com/cloudslang/cloud-slang/compare/bbb20fab5c5de83d724d3ec3f2c0adf770427078...bb66ee6910987996d31da7be7bf44747ed6200ba | diff --git a/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/verifier/SlangContentVerifier.java b/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/verifier/SlangContentVerifier.java
index f1abd557e..977a97bdb 100644
--- a/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/verifier/SlangContentVerifier.java
+++ b/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/verifier/SlangContentVerifier.java
@@ -24,9 +24,11 @@ import io.cloudslang.lang.tools.build.validation.MetadataMissingException;
import io.cloudslang.lang.tools.build.validation.StaticValidator;
import java.io.File;
import java.util.ArrayDeque;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
@@ -70,21 +72,27 @@ public class SlangContentVerifier {
loggingService.logEvent(Level.INFO, slangFiles.size() + " .sl files were found");
loggingService.logEvent(Level.INFO, "");
Queue<RuntimeException> exceptions = new ArrayDeque<>();
+ String errorMessagePrefixMetadata = "";
for (File slangFile: slangFiles) {
Executable sourceModel = null;
try {
+ errorMessagePrefixMetadata = "Failed to extract metadata for file: \\'" +
+ slangFile.getAbsoluteFile() + "\\'.\\n";
+ String errorMessagePrefixCompilation = "Failed to compile file: \\'" +
+ slangFile.getAbsoluteFile() + "\\'.\\n";
+
Validate.isTrue(slangFile.isFile(), "file path \\'" + slangFile.getAbsolutePath() +
"\\' must lead to a file");
SlangSource slangSource = SlangSource.fromFile(slangFile);
ExecutableModellingResult preCompileResult = slangCompiler.preCompileSource(slangSource);
sourceModel = preCompileResult.getExecutable();
- exceptions.addAll(preCompileResult.getErrors());
+ exceptions.addAll(prependPrefix(preCompileResult.getErrors(), errorMessagePrefixCompilation));
MetadataModellingResult metadataResult = metadataExtractor
.extractMetadataModellingResult(slangSource, shouldValidateCheckstyle);
Metadata sourceMetadata = metadataResult.getMetadata();
- exceptions.addAll(metadataResult.getErrors());
+ exceptions.addAll(prependPrefix(metadataResult.getErrors(), errorMessagePrefixMetadata));
if (sourceModel != null) {
int size = exceptions.size();
@@ -95,8 +103,7 @@ public class SlangContentVerifier {
}
}
} catch (Exception e) {
- String errorMessage = "Failed to extract metadata for file: \\'" +
- slangFile.getAbsoluteFile() + "\\'.\\n" + e.getMessage();
+ String errorMessage = errorMessagePrefixMetadata + e.getMessage();
loggingService.logEvent(Level.ERROR, errorMessage);
exceptions.add(new RuntimeException(errorMessage, e));
if (e instanceof MetadataMissingException && sourceModel != null) {
@@ -115,6 +122,18 @@ public class SlangContentVerifier {
return preCompileResult;
}
+ private Collection<RuntimeException> prependPrefix(Collection<RuntimeException> errors, String prefix) {
+ List<RuntimeException> result = new ArrayList<>();
+ for (RuntimeException ex : errors) {
+ result.add(wrapWithPrefix(ex, prefix));
+ }
+ return result;
+ }
+
+ private RuntimeException wrapWithPrefix(RuntimeException rex, String prefix) {
+ return new RuntimeException(prefix + rex.getMessage(), rex);
+ }
+
public CompileResult compileSlangModels(Map<String, Executable> slangModels) {
CompileResult compileResult = new CompileResult();
Map<String, CompilationArtifact> compiledArtifacts = new HashMap<>(); | ['cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/verifier/SlangContentVerifier.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 929,759 | 172,108 | 23,055 | 238 | 1,473 | 249 | 27 | 1 | 1,393 | 138 | 368 | 15 | 1 | 1 | 1970-01-01T00:24:58 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,028 | cloudslang/cloud-slang/419/413 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/413 | https://github.com/CloudSlang/cloud-slang/pull/419 | https://github.com/CloudSlang/cloud-slang/pull/419 | 1 | fixes | read_from_file operation context serialization error | If score tries to create execution message during this operation execution (for example if isRunningTooLong), it fails because PyFile object, which is serializable itself ,has field file:TextIOWrapper which is not serializable.
``` bash
(SimpleExecutionRunnable.java:126) ERROR - Error during execution!!!
java.lang.RuntimeException: Failed to serialize execution plan. Error:
at io.cloudslang.engine.queue.entities.ExecutionMessageConverter.objToBytes(ExecutionMessageConverter.java:100)
at io.cloudslang.engine.queue.entities.ExecutionMessageConverter.createPayload(ExecutionMessageConverter.java:45)
at io.cloudslang.engine.queue.entities.ExecutionMessageConverter.createPayload(ExecutionMessageConverter.java:41)
at io.cloudslang.worker.management.services.SimpleExecutionRunnable.createInProgressExecutionMessage(SimpleExecutionRunnable.java:387)
at io.cloudslang.worker.management.services.SimpleExecutionRunnable.isRunningTooLong(SimpleExecutionRunnable.java:330)
at io.cloudslang.worker.management.services.SimpleExecutionRunnable.shouldStop(SimpleExecutionRunnable.java:175)
at io.cloudslang.worker.management.services.SimpleExecutionRunnable.executeRegularStep(SimpleExecutionRunnable.java:161)
at io.cloudslang.worker.management.services.SimpleExecutionRunnable.run(SimpleExecutionRunnable.java:119)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at io.cloudslang.worker.management.services.WorkerThreadFactory$1.run(WorkerThreadFactory.java:33)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.io.NotSerializableException: org.python.core.io.TextIOWrapper
```
| b5d7bcc04c986238d85bf691b32970236d991263 | af352cae13eb5354b2059840809ad95564688770 | https://github.com/cloudslang/cloud-slang/compare/b5d7bcc04c986238d85bf691b32970236d991263...af352cae13eb5354b2059840809ad95564688770 | diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
index cdad7ad78..bc2912ec8 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
@@ -25,9 +25,13 @@ import io.cloudslang.score.lang.ExecutionRuntimeServices;
import org.python.core.Py;
import org.python.core.PyBoolean;
import org.python.core.PyException;
+import org.python.core.PyFile;
+import org.python.core.PyFunction;
import org.python.core.PyModule;
import org.python.core.PyObject;
import org.python.core.PyStringMap;
+import org.python.core.PySystemState;
+import org.python.core.PyType;
import org.python.util.PythonInterpreter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -36,6 +40,7 @@ import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@@ -258,7 +263,7 @@ public class ActionSteps extends AbstractSteps {
while (localsIterator.hasNext()) {
String key = localsIterator.next().asString();
PyObject value = interpreter.get(key);
- if ((key.startsWith("__") && key.endsWith("__")) || value instanceof PyModule) {
+ if (keyIsExcluded(key, value)) {
continue;
}
Serializable javaValue = resolveJythonObjectToJava(value);
@@ -268,6 +273,14 @@ public class ActionSteps extends AbstractSteps {
return returnValue;
}
+ private boolean keyIsExcluded(String key, PyObject value) {
+ return (key.startsWith("__") && key.endsWith("__")) ||
+ value instanceof PyFile ||
+ value instanceof PyModule ||
+ value instanceof PyFunction ||
+ value instanceof PySystemState;
+ }
+
private Serializable resolveJythonObjectToJava(PyObject value) {
if (value instanceof PyBoolean) {
PyBoolean pyBoolean = (PyBoolean) value;
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
index e18581a81..1caa89773 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
@@ -19,7 +19,9 @@ import org.junit.Assert;
import io.cloudslang.score.api.execution.ExecutionParametersConsts;
import io.cloudslang.score.events.ScoreEvent;
import io.cloudslang.score.lang.ExecutionRuntimeServices;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.python.util.PythonInterpreter;
import org.springframework.beans.factory.annotation.Autowired;
@@ -28,6 +30,8 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import java.io.File;
+import java.io.IOException;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashMap;
@@ -48,6 +52,9 @@ public class ActionStepsTest {
private static final long DEFAULT_TIMEOUT = 10000;
+ @Rule
+ public TemporaryFolder folder = new TemporaryFolder();
+
@Autowired
private ActionSteps actionSteps;
ExecutionRuntimeServices executionRuntimeServicesMock = mock(ExecutionRuntimeServices.class);
@@ -465,6 +472,32 @@ public class ActionStepsTest {
Assert.assertEquals("Python action outputs are not as expected", expectedOutputs, actualOutputs);
}
+ @Test
+ public void doActionInvalidReturnTypes() throws IOException {
+ //prepare doAction arguments
+ RunEnvironment runEnv = new RunEnvironment();
+ File file = folder.newFile();
+ String userPythonScript =
+ "valid = 1\\n" +
+ "with open('" + file.getAbsolutePath() + "', 'r') as f:\\n" +
+ " f.close()\\n\\n" +
+ "import sys\\n" +
+ "import io\\n" +
+ "def a():\\n" +
+ " print 'a'\\n\\n";
+
+ //invoke doAction
+ actionSteps.doAction(runEnv, new HashMap<String, Object>(), PYTHON, "", "", executionRuntimeServicesMock, userPythonScript, 2L);
+
+ //extract actual outputs
+ ReturnValues actualReturnValues = runEnv.removeReturnValues();
+ Map<String, Serializable> actualOutputs = actualReturnValues.getOutputs();
+
+ //verify matching
+ Assert.assertEquals("Invalid types passed exclusion",
+ 1, actualOutputs.size());
+ }
+
@Test (expected = RuntimeException.class, timeout = DEFAULT_TIMEOUT)
public void doActionPythonMissingInputsTest() {
//prepare doAction arguments | ['cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java', 'cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 339,540 | 65,748 | 8,746 | 104 | 629 | 127 | 15 | 1 | 1,894 | 79 | 386 | 22 | 0 | 1 | 1970-01-01T00:23:59 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,029 | cloudslang/cloud-slang/416/406 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/406 | https://github.com/CloudSlang/cloud-slang/pull/416 | https://github.com/CloudSlang/cloud-slang/pull/416 | 1 | fixes | Python action - Cannot import svn.local | When trying to import in an action python script, we get:
```
Command failed java.lang.RuntimeException: Slang Error : Error executing python script: Traceback (most recent call last):
File "<string>", line 2, in <module>
File "C:\\Users\\stoneo\\Desktop\\Cslang-0.8-rc-1\\cslang\\python-lib\\svn\\local.py", line 4, in <module>
import svn.common
File "C:\\Users\\stoneo\\Desktop\\Cslang-0.8-rc-1\\cslang\\python-lib\\svn\\common.py", line 4, in <module>
import dateutil.parser
File "C:\\Users\\stoneo\\Desktop\\Cslang-0.8-rc-1\\cslang\\python-lib\\dateutil\\parser.py", line 22, in <module>
from io import StringIO
ImportError: cannot import name StringIO
```
If we first import in our script StringIO from io nmaually, it works:
``` python
from io import StringIO
import svn.local
```
| 031bd28395ebde1bb8817f82daea8a0fe3e51585 | 564f2613037e39cb8dbcbdfc43abec2adb4817db | https://github.com/cloudslang/cloud-slang/compare/031bd28395ebde1bb8817f82daea8a0fe3e51585...564f2613037e39cb8dbcbdfc43abec2adb4817db | diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/configuration/SlangRuntimeSpringConfig.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/configuration/SlangRuntimeSpringConfig.java
index e624944e9..511ddabab 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/configuration/SlangRuntimeSpringConfig.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/configuration/SlangRuntimeSpringConfig.java
@@ -30,7 +30,10 @@ public class SlangRuntimeSpringConfig {
@Bean
public PythonInterpreter interpreter(){
- return new PythonInterpreter();
+ PythonInterpreter interpreter = new PythonInterpreter();
+// here to avoid jython preferring io.cloudslang package over python io package
+ interpreter.exec("import io");
+ return interpreter;
}
@Bean
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
index e18581a81..4a5d81735 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java
@@ -489,6 +489,23 @@ public class ActionStepsTest {
actionSteps.doAction(runEnv, nonSerializableExecutionData, PYTHON, "", "", executionRuntimeServicesMock, userPythonScript, 2L);
}
+
+ @Test
+ public void doActionPythonImportRightIOPackage() {
+ //prepare doAction arguments
+ RunEnvironment runEnv = new RunEnvironment();
+
+ String userPythonScript =
+ "import io\\n" +
+ "if 'StringIO' not in dir(io):\\n" +
+ " raise Exception('cant find StringIO')";
+
+ //invoke doAction
+ actionSteps.doAction(
+ runEnv, new HashMap<String, Object>(), PYTHON, "", "",
+ executionRuntimeServicesMock, userPythonScript, 2L);
+ }
+
@Test (expected = RuntimeException.class, timeout = DEFAULT_TIMEOUT)
public void doActionPythonInputTypeMismatchTest() {
//prepare doAction arguments | ['cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ActionStepsTest.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/configuration/SlangRuntimeSpringConfig.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 339,318 | 65,701 | 8,740 | 104 | 263 | 44 | 5 | 1 | 798 | 87 | 238 | 21 | 0 | 2 | 1970-01-01T00:23:59 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,024 | cloudslang/cloud-slang/500/499 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/499 | https://github.com/CloudSlang/cloud-slang/pull/500 | https://github.com/CloudSlang/cloud-slang/pull/500 | 1 | fixes | Double title 'Flow outputs' | The behavior of the "Flow outputs" messages are changed in CLI. The flow outputs were grouped as a single block. Now we have 'Flow outputs' title for each output:
E.g.:
Flow outputs:
- return_result = Reservation{region=us-east-1, reservationId=r-4cfb859b, instances=[RunningInstance{region=us-east...
Flow outputs:
- return_code = 0
| 956e27184a41ab38ac1d9ddb3c8d2fd1c5705900 | fcff612803b0c6d77d499f3a7eb19be3044ae30e | https://github.com/cloudslang/cloud-slang/compare/956e27184a41ab38ac1d9ddb3c8d2fd1c5705900...fcff612803b0c6d77d499f3a7eb19be3044ae30e | diff --git a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java
index 9d2e636e9..160497ad7 100644
--- a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java
+++ b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java
@@ -107,8 +107,8 @@ public class SyncTriggerEventListener implements ScoreEventListener{
&& data.containsKey(LanguageEventData.PATH)
&& data.get(LanguageEventData.PATH).equals(EXEC_START_PATH)) {
Map<String, Serializable> outputs = extractNotEmptyOutputs(data);
+ printWithColor(Ansi.Color.WHITE, "\\nFlow outputs:");
for (String key : outputs.keySet()) {
- printWithColor(Ansi.Color.WHITE, "\\nFlow outputs:");
printWithColor(Ansi.Color.WHITE, "- " + key + " = " + outputs.get(key));
}
} | ['cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 352,950 | 68,174 | 9,053 | 106 | 151 | 30 | 2 | 1 | 337 | 44 | 87 | 9 | 0 | 0 | 1970-01-01T00:24:08 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,023 | cloudslang/cloud-slang/521/510 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/510 | https://github.com/CloudSlang/cloud-slang/pull/521 | https://github.com/CloudSlang/cloud-slang/pull/521 | 1 | fixes | Entity references in logs | some entities still appear as obj refs (e.g. `io.cloudslang.lang.entities.ResultNavigation@5b7d`) instead of toString representation
| 4586755a44ef9dde8952ad3bf42c2866a024d651 | 503644bc56bceca49977883ace69d95586c1c673 | https://github.com/cloudslang/cloud-slang/compare/4586755a44ef9dde8952ad3bf42c2866a024d651...503644bc56bceca49977883ace69d95586c1c673 | diff --git a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/AsyncLoopStatement.java b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/AsyncLoopStatement.java
index 97972aa9f..2cebc6016 100644
--- a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/AsyncLoopStatement.java
+++ b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/AsyncLoopStatement.java
@@ -12,6 +12,7 @@ package io.cloudslang.lang.entities;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
import java.io.Serializable;
@@ -43,26 +44,33 @@ public class AsyncLoopStatement extends LoopStatement implements Serializable {
return varName;
}
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .appendSuper(super.toString())
+ .append("varName", varName)
+ .toString();
+ }
+
@Override
public boolean equals(Object o) {
- if (this == o)
- return true;
- if (o == null || getClass() != o.getClass())
- return false;
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
AsyncLoopStatement that = (AsyncLoopStatement) o;
return new EqualsBuilder()
+ .appendSuper(super.equals(o))
.append(varName, that.varName)
- .append(getExpression(), that.getExpression())
.isEquals();
}
@Override
public int hashCode() {
- return new HashCodeBuilder()
+ return new HashCodeBuilder(17, 37)
+ .appendSuper(super.hashCode())
.append(varName)
- .append(getExpression())
.toHashCode();
}
diff --git a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/CompilationArtifact.java b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/CompilationArtifact.java
index fb1b9791e..aaf92430a 100644
--- a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/CompilationArtifact.java
+++ b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/CompilationArtifact.java
@@ -8,10 +8,11 @@
*/
package io.cloudslang.lang.entities;
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
import io.cloudslang.lang.entities.bindings.Input;
import io.cloudslang.score.api.ExecutionPlan;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
import java.util.Collection;
import java.util.List;
@@ -50,14 +51,39 @@ public class CompilationArtifact {
return systemProperties;
}
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .append("executionPlan", executionPlan)
+ .append("dependencies", dependencies)
+ .append("inputs", inputs)
+ .append("systemProperties", systemProperties)
+ .toString();
+ }
+
@Override
public boolean equals(Object o) {
- return EqualsBuilder.reflectionEquals(this, o);
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
+
+ CompilationArtifact that = (CompilationArtifact) o;
+
+ return new EqualsBuilder()
+ .append(executionPlan, that.executionPlan)
+ .append(dependencies, that.dependencies)
+ .append(inputs, that.inputs)
+ .append(systemProperties, that.systemProperties)
+ .isEquals();
}
@Override
public int hashCode() {
- return HashCodeBuilder.reflectionHashCode(this);
+ return new HashCodeBuilder(17, 37)
+ .append(executionPlan)
+ .append(dependencies)
+ .append(inputs)
+ .append(systemProperties)
+ .toHashCode();
}
-
}
diff --git a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ListForLoopStatement.java b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ListForLoopStatement.java
index 49c723b56..57229ff07 100644
--- a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ListForLoopStatement.java
+++ b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ListForLoopStatement.java
@@ -12,6 +12,7 @@ package io.cloudslang.lang.entities;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
import java.io.Serializable;
@@ -45,26 +46,33 @@ public class ListForLoopStatement extends LoopStatement implements Serializable
return varName;
}
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .appendSuper(super.toString())
+ .append("varName", varName)
+ .toString();
+ }
+
@Override
public boolean equals(Object o) {
- if (this == o)
- return true;
- if (o == null || getClass() != o.getClass())
- return false;
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
ListForLoopStatement that = (ListForLoopStatement) o;
return new EqualsBuilder()
+ .appendSuper(super.equals(o))
.append(varName, that.varName)
- .append(getExpression(), that.getExpression())
.isEquals();
}
@Override
public int hashCode() {
- return new HashCodeBuilder()
+ return new HashCodeBuilder(17, 37)
+ .appendSuper(super.hashCode())
.append(varName)
- .append(getExpression())
.toHashCode();
}
diff --git a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/LoopStatement.java b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/LoopStatement.java
index b9995cf80..7c7c8af21 100644
--- a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/LoopStatement.java
+++ b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/LoopStatement.java
@@ -10,6 +10,9 @@
package io.cloudslang.lang.entities;
import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
import java.io.Serializable;
@@ -37,4 +40,32 @@ public abstract class LoopStatement implements Serializable {
public String getExpression() {
return expression;
}
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .append("expression", expression)
+ .toString();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
+
+ LoopStatement that = (LoopStatement) o;
+
+ return new EqualsBuilder()
+ .append(expression, that.expression)
+ .isEquals();
+ }
+
+ @Override
+ public int hashCode() {
+ return new HashCodeBuilder(17, 37)
+ .append(expression)
+ .toHashCode();
+ }
+
}
diff --git a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/MapForLoopStatement.java b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/MapForLoopStatement.java
index a452422fe..72f61fae9 100644
--- a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/MapForLoopStatement.java
+++ b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/MapForLoopStatement.java
@@ -12,6 +12,7 @@ package io.cloudslang.lang.entities;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
import java.io.Serializable;
@@ -53,28 +54,36 @@ public class MapForLoopStatement extends LoopStatement implements Serializable{
return valueName;
}
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .appendSuper(super.toString())
+ .append("keyName", keyName)
+ .append("valueName", valueName)
+ .toString();
+ }
+
@Override
public boolean equals(Object o) {
- if (this == o)
- return true;
- if (o == null || getClass() != o.getClass())
- return false;
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
MapForLoopStatement that = (MapForLoopStatement) o;
return new EqualsBuilder()
+ .appendSuper(super.equals(o))
.append(keyName, that.keyName)
.append(valueName, that.valueName)
- .append(getExpression(), that.getExpression())
.isEquals();
}
@Override
public int hashCode() {
- return new HashCodeBuilder()
+ return new HashCodeBuilder(17, 37)
+ .appendSuper(super.hashCode())
.append(keyName)
.append(valueName)
- .append(getExpression())
.toHashCode();
}
diff --git a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ResultNavigation.java b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ResultNavigation.java
index 301163100..a9776ffe3 100644
--- a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ResultNavigation.java
+++ b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ResultNavigation.java
@@ -11,6 +11,7 @@ package io.cloudslang.lang.entities;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
import java.io.Serializable;
@@ -42,26 +43,34 @@ public class ResultNavigation implements Serializable {
return this.presetResult;
}
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .append("nextStepId", nextStepId)
+ .append("presetResult", presetResult)
+ .toString();
+ }
+
@Override
public boolean equals(Object o) {
- if (this == o)
- return true;
- if (o == null || getClass() != o.getClass())
- return false;
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
ResultNavigation that = (ResultNavigation) o;
return new EqualsBuilder()
- .append(this.nextStepId, that.nextStepId)
- .append(this.presetResult, that.presetResult)
+ .append(nextStepId, that.nextStepId)
+ .append(presetResult, that.presetResult)
.isEquals();
}
@Override
public int hashCode() {
- return new HashCodeBuilder()
+ return new HashCodeBuilder(17, 37)
.append(nextStepId)
.append(presetResult)
.toHashCode();
}
+
}
diff --git a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/InOutParam.java b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/InOutParam.java
index c4e355dc9..e9a4eefb0 100644
--- a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/InOutParam.java
+++ b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/InOutParam.java
@@ -46,17 +46,32 @@ public abstract class InOutParam implements Serializable {
@Override
public String toString() {
- return ToStringBuilder.reflectionToString(this);
+ return new ToStringBuilder(this)
+ .append("name", name)
+ .append("value", value)
+ .toString();
}
@Override
public boolean equals(Object o) {
- return EqualsBuilder.reflectionEquals(this, o);
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
+
+ InOutParam that = (InOutParam) o;
+
+ return new EqualsBuilder()
+ .append(name, that.name)
+ .append(value, that.value)
+ .isEquals();
}
@Override
public int hashCode() {
- return HashCodeBuilder.reflectionHashCode(this);
+ return new HashCodeBuilder(17, 37)
+ .append(name)
+ .append(value)
+ .toHashCode();
}
}
diff --git a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/Input.java b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/Input.java
index 3ab30e1b5..c2fceb80a 100644
--- a/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/Input.java
+++ b/cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/Input.java
@@ -8,6 +8,10 @@
*/
package io.cloudslang.lang.entities.bindings;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+
import java.io.Serializable;
/**
@@ -58,4 +62,43 @@ public class Input extends InOutParam {
return this.systemPropertyName;
}
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .appendSuper(super.toString())
+ .append("encrypted", encrypted)
+ .append("required", required)
+ .append("overridable", overridable)
+ .append("systemPropertyName", systemPropertyName)
+ .toString();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Input input = (Input) o;
+
+ return new EqualsBuilder()
+ .appendSuper(super.equals(o))
+ .append(encrypted, input.encrypted)
+ .append(required, input.required)
+ .append(overridable, input.overridable)
+ .append(systemPropertyName, input.systemPropertyName)
+ .isEquals();
+ }
+
+ @Override
+ public int hashCode() {
+ return new HashCodeBuilder(17, 37)
+ .appendSuper(super.hashCode())
+ .append(encrypted)
+ .append(required)
+ .append(overridable)
+ .append(systemPropertyName)
+ .toHashCode();
+ }
+
} | ['cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/InOutParam.java', 'cloudslang-entities/src/main/java/io/cloudslang/lang/entities/AsyncLoopStatement.java', 'cloudslang-entities/src/main/java/io/cloudslang/lang/entities/MapForLoopStatement.java', 'cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ListForLoopStatement.java', 'cloudslang-entities/src/main/java/io/cloudslang/lang/entities/CompilationArtifact.java', 'cloudslang-entities/src/main/java/io/cloudslang/lang/entities/bindings/Input.java', 'cloudslang-entities/src/main/java/io/cloudslang/lang/entities/LoopStatement.java', 'cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ResultNavigation.java'] | {'.java': 8} | 8 | 8 | 0 | 0 | 8 | 352,938 | 68,170 | 9,053 | 106 | 7,378 | 1,487 | 221 | 8 | 133 | 13 | 31 | 2 | 0 | 0 | 1970-01-01T00:24:10 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,027 | cloudslang/cloud-slang/428/365 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/365 | https://github.com/CloudSlang/cloud-slang/pull/428 | https://github.com/CloudSlang/cloud-slang/pull/428 | 1 | fixes | Sporadic test failures - Travis | SimpleFlowTest.testFlowWithGlobalSession
DataFlowTest.testBindingsFlow
detailed log here: https://gist.github.com/Bonczidai/864f936c36238b5c903b
| 23a53202bd7189b83e508e62679620faf20d22d7 | 3ad2878242beab856579363dddf16d45fc8db2db | https://github.com/cloudslang/cloud-slang/compare/23a53202bd7189b83e508e62679620faf20d22d7...3ad2878242beab856579363dddf16d45fc8db2db | diff --git a/cloudslang-all/src/test/java/io/cloudslang/lang/api/SlangImplTest.java b/cloudslang-all/src/test/java/io/cloudslang/lang/api/SlangImplTest.java
index 4c3951725..81ae20b96 100644
--- a/cloudslang-all/src/test/java/io/cloudslang/lang/api/SlangImplTest.java
+++ b/cloudslang-all/src/test/java/io/cloudslang/lang/api/SlangImplTest.java
@@ -214,7 +214,7 @@ public class SlangImplTest {
private class EventListener implements ScoreEventListener{
@Override
- public void onEvent(ScoreEvent event) throws InterruptedException {
+ public synchronized void onEvent(ScoreEvent event) throws InterruptedException {
}
}
diff --git a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCLI.java b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCLI.java
index 872301ac6..a7ae45c3f 100644
--- a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCLI.java
+++ b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCLI.java
@@ -156,7 +156,7 @@ public class SlangCLI implements CommandMarker {
handlerTypes.add(ScoreLangConstants.EVENT_EXECUTION_FINISHED);
scoreServices.subscribe(new ScoreEventListener() {
@Override
- public void onEvent(ScoreEvent event) {
+ public synchronized void onEvent(ScoreEvent event) {
logEvent(event);
}
}, handlerTypes);
diff --git a/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/SlangBuildMain.java b/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/SlangBuildMain.java
index 8efd31825..033a35a48 100644
--- a/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/SlangBuildMain.java
+++ b/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/SlangBuildMain.java
@@ -233,7 +233,7 @@ public class SlangBuildMain {
private static void registerEventHandlers(Slang slang) {
slang.subscribeOnAllEvents(new ScoreEventListener() {
@Override
- public void onEvent(ScoreEvent event) {
+ public synchronized void onEvent(ScoreEvent event) {
logEvent(event);
}
});
diff --git a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java
index bad0014bd..fd2aeb7ad 100644
--- a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java
+++ b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java
@@ -47,7 +47,7 @@ public class TriggerFlows {
final BlockingQueue<ScoreEvent> finishEvent = new LinkedBlockingQueue<>();
ScoreEventListener finishListener = new ScoreEventListener() {
@Override
- public void onEvent(ScoreEvent event) throws InterruptedException {
+ public synchronized void onEvent(ScoreEvent event) throws InterruptedException {
finishEvent.add(event);
}
}; | ['cloudslang-all/src/test/java/io/cloudslang/lang/api/SlangImplTest.java', 'cloudslang-cli/src/main/java/io/cloudslang/lang/cli/SlangCLI.java', 'cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java', 'cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/SlangBuildMain.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 339,784 | 65,826 | 8,761 | 104 | 236 | 46 | 4 | 2 | 146 | 6 | 44 | 5 | 1 | 0 | 1970-01-01T00:23:59 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,033 | cloudslang/cloud-slang/337/336 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/336 | https://github.com/CloudSlang/cloud-slang/pull/337 | https://github.com/CloudSlang/cloud-slang/pull/337 | 1 | fixes | preCompile API returns input object which contains input name as default value | When the input has required / overridable / encrypted field (the value doesn’t matter), and has no default value, the expression field of Input object contains the input name during the precompile API.
| cfd53e0e24ffc4eb54abe9d98ef4ce7bde9b10a8 | 5e0481fa9206304d125683d486986fcccca879fd | https://github.com/cloudslang/cloud-slang/compare/cfd53e0e24ffc4eb54abe9d98ef4ce7bde9b10a8...5e0481fa9206304d125683d486986fcccca879fd | diff --git a/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java b/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java
index 6c1274a1c..142c0ea26 100644
--- a/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java
+++ b/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java
@@ -68,8 +68,7 @@ public abstract class AbstractInputsTransformer {
(boolean) props.get(OVERRIDABLE_KEY);
boolean defaultSpecified = props.containsKey(DEFAULT_KEY);
String inputName = entry.getKey();
- String expression = defaultSpecified ? props.get(DEFAULT_KEY)
- .toString() : inputName;
+ String expression = defaultSpecified ? props.get(DEFAULT_KEY).toString() : null;
String systemPropertyName = (String) props.get(SYSTEM_PROPERTY_KEY);
if (!overridable && !defaultSpecified && StringUtils.isEmpty(systemPropertyName)) {
diff --git a/cloudslang-compiler/src/test/java/io/cloudslang/lang/compiler/modeller/transformers/InputsTransformerTest.java b/cloudslang-compiler/src/test/java/io/cloudslang/lang/compiler/modeller/transformers/InputsTransformerTest.java
index 3e410d053..7ef3af27f 100644
--- a/cloudslang-compiler/src/test/java/io/cloudslang/lang/compiler/modeller/transformers/InputsTransformerTest.java
+++ b/cloudslang-compiler/src/test/java/io/cloudslang/lang/compiler/modeller/transformers/InputsTransformerTest.java
@@ -105,7 +105,7 @@ public class InputsTransformerTest {
@SuppressWarnings("unchecked") List<Input> inputs = inputTransformer.transform(inputsMap);
Input input = inputs.get(4);
Assert.assertEquals("input5", input.getName());
- Assert.assertEquals("input5", input.getExpression());
+ Assert.assertEquals(null, input.getExpression());
Assert.assertEquals(true, input.isEncrypted());
Assert.assertEquals(true, input.isRequired());
} | ['cloudslang-compiler/src/test/java/io/cloudslang/lang/compiler/modeller/transformers/InputsTransformerTest.java', 'cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/modeller/transformers/AbstractInputsTransformer.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 332,341 | 64,501 | 8,616 | 104 | 238 | 39 | 3 | 1 | 203 | 33 | 42 | 2 | 0 | 0 | 1970-01-01T00:23:55 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,036 | cloudslang/cloud-slang/164/160 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/160 | https://github.com/CloudSlang/cloud-slang/pull/164 | https://github.com/CloudSlang/cloud-slang/pull/164 | 1 | fixes | schedule slang flow fails on inputs passed as unmodifiable collection | - also the run time exception is fired from Slang language and not from score as ScoreEvent.Error
| 2fe8321efdb3adab28f08afa65e2dd4dc98c2600 | 773cd3dee66c7e1ae82ad35e969feea7c03f734f | https://github.com/cloudslang/cloud-slang/compare/2fe8321efdb3adab28f08afa65e2dd4dc98c2600...773cd3dee66c7e1ae82ad35e969feea7c03f734f | diff --git a/score-lang-api/src/main/java/org/openscore/lang/api/SlangImpl.java b/score-lang-api/src/main/java/org/openscore/lang/api/SlangImpl.java
index e1757282e..f81201970 100644
--- a/score-lang-api/src/main/java/org/openscore/lang/api/SlangImpl.java
+++ b/score-lang-api/src/main/java/org/openscore/lang/api/SlangImpl.java
@@ -80,7 +80,8 @@ public class SlangImpl implements Slang {
Map<String, Serializable> executionContext = new HashMap<>();
RunEnvironment runEnv = new RunEnvironment(systemProperties);
executionContext.put(ScoreLangConstants.RUN_ENV, runEnv);
- executionContext.put(ScoreLangConstants.USER_INPUTS_KEY, (Serializable)runInputs);
+ Map<String, ? extends Serializable> clonedRunInputs = new HashMap<>(runInputs);
+ executionContext.put(ScoreLangConstants.USER_INPUTS_KEY, (Serializable) clonedRunInputs);
TriggeringProperties triggeringProperties = TriggeringProperties.create(compilationArtifact.getExecutionPlan()).setDependencies(compilationArtifact.getDependencies())
.setContext(executionContext);
return score.trigger(triggeringProperties); | ['score-lang-api/src/main/java/org/openscore/lang/api/SlangImpl.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 248,786 | 48,203 | 6,507 | 88 | 268 | 59 | 3 | 1 | 98 | 17 | 21 | 2 | 0 | 0 | 1970-01-01T00:23:46 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,030 | cloudslang/cloud-slang/361/339 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/339 | https://github.com/CloudSlang/cloud-slang/pull/361 | https://github.com/CloudSlang/cloud-slang/pull/361 | 1 | fixes | Failed to compile flow which uses subflow located in file in the same folder | Let's say we have flow flow_system_properties_subflow.sl in flow_system_properties_subflow.sl which uses simplecp.flows.flow_system_properties_subflow_level1 which located in file simplecp.flows.flow_system_properties_subflow_level1.sl located in the same folder
When executing cli with command
run --f c:/Temp/slang/tests/SimpleCp/flow_system_properties_subflow.sl --cp c:/Temp/slang/tests
We get error
Command failed java.lang.RuntimeException: java.lang.RuntimeException: Reference: 'simplecp.flows.flow_system_properties_subflow_level1' in executable: 'flow_system_properties_subflow', wasn't found in path
When executing command with specifying --cp with flow path
run --f c:/Temp/slang/tests/SimpleCp/flow_system_properties_subflow.sl --cp c:/Temp/slang/tests/SimpleCp
We get error
Command failed java.lang.RuntimeException: java.lang.IllegalArgumentException: Source flow_system_properties_subflow has dependencies but no path was given to the compiler
| db86e60a360e5f69afa22395c5012c31c1ad5a01 | bc8c8dccb791f43b88f55efad59373447c42bf04 | https://github.com/cloudslang/cloud-slang/compare/db86e60a360e5f69afa22395c5012c31c1ad5a01...bc8c8dccb791f43b88f55efad59373447c42bf04 | diff --git a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/utils/CompilerHelperImpl.java b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/utils/CompilerHelperImpl.java
index 9cd5a57f7..b3db9c95a 100644
--- a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/utils/CompilerHelperImpl.java
+++ b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/utils/CompilerHelperImpl.java
@@ -86,8 +86,6 @@ public class CompilerHelperImpl implements CompilerHelper{
}
for (String dependency:dependencies) {
Collection<File> dependenciesFiles = FileUtils.listFiles(new File(dependency), SLANG_FILE_EXTENSIONS, true);
- dependenciesFiles = select(dependenciesFiles, having(on(File.class).getPath(), not(containsString(SP_DIR))));
- dependenciesFiles = select(dependenciesFiles, having(on(File.class).getPath(), not(containsString(INPUT_DIR))));
depsSources.addAll(convert(dependenciesFiles, new Converter<File, SlangSource>() {
@Override
public SlangSource convert(File from) { | ['cloudslang-cli/src/main/java/io/cloudslang/lang/cli/utils/CompilerHelperImpl.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 332,678 | 64,561 | 8,629 | 104 | 248 | 47 | 2 | 1 | 968 | 80 | 213 | 18 | 0 | 0 | 1970-01-01T00:23:56 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,032 | cloudslang/cloud-slang/346/318 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/318 | https://github.com/CloudSlang/cloud-slang/pull/346 | https://github.com/CloudSlang/cloud-slang/pull/346 | 1 | fixes | LanguageEventData.getException throw ClassCastException | 2015-06-11 11:24:57,465 [1_WorkerExecutionThread-13_124100530](ExecutionServiceImpl.java:100) ERROR - Error during execution:
java.lang.ClassCastException: java.lang.String cannot be cast to java.lang.Exception
at io.cloudslang.lang.runtime.events.LanguageEventData.getException(LanguageEventData.java:106)
| c08df7c691bbe1aa9b93d818a9cf7fb535f4f90d | 7d3c502b657251f576f8d73b08b555b1db6d82d8 | https://github.com/cloudslang/cloud-slang/compare/c08df7c691bbe1aa9b93d818a9cf7fb535f4f90d...7d3c502b657251f576f8d73b08b555b1db6d82d8 | diff --git a/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/tester/TriggerTestCaseEventListener.java b/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/tester/TriggerTestCaseEventListener.java
index 88a2f0573..3a3cf1ab2 100644
--- a/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/tester/TriggerTestCaseEventListener.java
+++ b/cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/tester/TriggerTestCaseEventListener.java
@@ -21,8 +21,6 @@ package io.cloudslang.lang.tools.build.tester;
import io.cloudslang.lang.entities.ScoreLangConstants;
import io.cloudslang.lang.runtime.events.LanguageEventData;
import org.apache.commons.collections4.MapUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
import io.cloudslang.score.events.EventConstants;
import io.cloudslang.score.events.ScoreEvent;
import io.cloudslang.score.events.ScoreEventListener;
@@ -60,7 +58,8 @@ public class TriggerTestCaseEventListener implements ScoreEventListener {
@Override
public synchronized void onEvent(ScoreEvent scoreEvent) throws InterruptedException {
- @SuppressWarnings("unchecked") Map<String,Serializable> data = (Map<String,Serializable>)scoreEvent.getData();
+ @SuppressWarnings("unchecked") Map<String,Serializable> data = (Map<String,Serializable>) scoreEvent.getData();
+ LanguageEventData eventData;
switch (scoreEvent.getEventType()){
case EventConstants.SCORE_FINISHED_EVENT :
break;
@@ -70,15 +69,18 @@ public class TriggerTestCaseEventListener implements ScoreEventListener {
flowFinished.set(true);
break;
case ScoreLangConstants.SLANG_EXECUTION_EXCEPTION:
- errorMessage.set((String)data.get(LanguageEventData.EXCEPTION));
+ eventData = (LanguageEventData) data;
+ errorMessage.set(eventData.getException());
flowFinished.set(true);
break;
case ScoreLangConstants.EVENT_EXECUTION_FINISHED :
- result = (String)data.get(LanguageEventData.RESULT);
+ eventData = (LanguageEventData) data;
+ result = eventData.getResult();
flowFinished.set(true);
break;
case ScoreLangConstants.EVENT_OUTPUT_END:
- Map<String, Serializable> extractOutputs = extractOutputs(data);
+ eventData = (LanguageEventData) data;
+ Map<String, Serializable> extractOutputs = extractOutputs(eventData);
if(MapUtils.isNotEmpty(extractOutputs)) {
outputs = extractOutputs;
}
@@ -90,18 +92,17 @@ public class TriggerTestCaseEventListener implements ScoreEventListener {
return new ReturnValues(outputs, result);
}
- private static Map<String, Serializable> extractOutputs(Map<String, Serializable> data) {
+ private static Map<String, Serializable> extractOutputs(LanguageEventData data) {
Map<String, Serializable> outputsMap = new HashMap<>();
boolean thereAreOutputsForRootPath =
data.containsKey(LanguageEventData.OUTPUTS)
&& data.containsKey(LanguageEventData.PATH)
- && data.get(LanguageEventData.PATH).equals(EXEC_START_PATH);
+ && data.getPath().equals(EXEC_START_PATH);
if (thereAreOutputsForRootPath) {
- @SuppressWarnings("unchecked") Map<String, Serializable> outputs =
- (Map<String, Serializable>) data.get(LanguageEventData.OUTPUTS);
+ Map<String, Serializable> outputs = data.getOutputs();
if (MapUtils.isNotEmpty(outputs)) outputsMap.putAll(outputs);
}
diff --git a/cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/tester/SlangTestRunnerTest.java b/cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/tester/SlangTestRunnerTest.java
index 89e2bef34..1a70d3a2a 100644
--- a/cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/tester/SlangTestRunnerTest.java
+++ b/cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/tester/SlangTestRunnerTest.java
@@ -1,7 +1,5 @@
package io.cloudslang.lang.tools.build.tester;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Sets;
import io.cloudslang.lang.api.Slang;
import io.cloudslang.lang.compiler.SlangSource;
import io.cloudslang.lang.entities.CompilationArtifact;
@@ -449,7 +447,9 @@ public class SlangTestRunnerTest {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
ScoreEventListener listener = (ScoreEventListener) invocationOnMock.getArguments()[0];
- listener.onEvent(new ScoreEvent(ScoreLangConstants.EVENT_EXECUTION_FINISHED, ImmutableMap.of(LanguageEventData.RESULT, "SUCCESS")));
+ LanguageEventData data = new LanguageEventData();
+ data.setResult("SUCCESS");
+ listener.onEvent(new ScoreEvent(ScoreLangConstants.EVENT_EXECUTION_FINISHED, data));
return listener;
}
}).when(slang).subscribeOnEvents(any(ScoreEventListener.class), anySetOf(String.class));
@@ -460,9 +460,13 @@ public class SlangTestRunnerTest {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
ScoreEventListener listener = (ScoreEventListener) invocationOnMock.getArguments()[0];
- ImmutableMap data = ImmutableMap.of(LanguageEventData.OUTPUTS, outputs, LanguageEventData.PATH, "0");
+ LanguageEventData data = new LanguageEventData();
+ data.setOutputs(outputs);
+ data.setPath("0");
listener.onEvent(new ScoreEvent(ScoreLangConstants.EVENT_OUTPUT_END, data));
- listener.onEvent(new ScoreEvent(ScoreLangConstants.EVENT_EXECUTION_FINISHED, ImmutableMap.of(LanguageEventData.RESULT, "SUCCESS")));
+ data = new LanguageEventData();
+ data.setResult("SUCCESS");
+ listener.onEvent(new ScoreEvent(ScoreLangConstants.EVENT_EXECUTION_FINISHED, data));
return listener;
}
}).when(slang).subscribeOnEvents(any(ScoreEventListener.class), anySetOf(String.class));
@@ -473,7 +477,9 @@ public class SlangTestRunnerTest {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
ScoreEventListener listener = (ScoreEventListener) invocationOnMock.getArguments()[0];
- listener.onEvent(new ScoreEvent(ScoreLangConstants.SLANG_EXECUTION_EXCEPTION, ImmutableMap.of(LanguageEventData.EXCEPTION, "Error")));
+ LanguageEventData data = new LanguageEventData();
+ data.setException("Error");
+ listener.onEvent(new ScoreEvent(ScoreLangConstants.SLANG_EXECUTION_EXCEPTION, data));
return listener;
}
}).when(slang).subscribeOnEvents(any(ScoreEventListener.class), anySetOf(String.class));
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
index 2e3259526..ab1a63258 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
@@ -102,12 +102,20 @@ public class LanguageEventData extends HashMap<String, Serializable> {
put(PATH, path);
}
- public Exception getException() {
- return (Exception) get(EXCEPTION);
+ public String getResult() {
+ return (String) get(RESULT);
}
- public void setException(Exception ex) {
- put(EXCEPTION, ex);
+ public void setResult(String result) {
+ put(RESULT, result);
+ }
+
+ public String getException() {
+ return (String) get(EXCEPTION);
+ }
+
+ public void setException(String exceptionMessage) {
+ put(EXCEPTION, exceptionMessage);
}
public Map<String, Serializable> getInputs() {
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java
index 864b3451a..5815b50f3 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java
@@ -101,15 +101,27 @@ public class LanguageEventDataTest {
assertEquals(exePath, eventData.get(LanguageEventData.PATH));
}
+ /**
+ * Test method for {@link LanguageEventData#getResult()}.
+ */
+ @Test
+ public void testResult() {
+ String message = "Good";
+ eventData.setResult(message);
+ assertEquals(message, eventData.getResult());
+ assertEquals(message, eventData.get(LanguageEventData.RESULT));
+ }
+
/**
* Test method for {@link LanguageEventData#getException()}.
*/
@Test
public void testException() {
- Exception ex = new Exception("My exception");
- eventData.setException(ex);
- assertEquals(ex, eventData.getException());
- assertEquals(ex, eventData.get(LanguageEventData.EXCEPTION));
+ String message = "My exception";
+ Exception ex = new Exception(message);
+ eventData.setException(ex.getMessage());
+ assertEquals(message, eventData.getException());
+ assertEquals(message, eventData.get(LanguageEventData.EXCEPTION));
}
/**
diff --git a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java
index bc4ca6d7d..2d8e67cf6 100644
--- a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java
+++ b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java
@@ -59,7 +59,7 @@ public class TriggerFlows {
ScoreEvent event = finishEvent.take();
if (event.getEventType().equals(ScoreLangConstants.SLANG_EXECUTION_EXCEPTION)){
LanguageEventData languageEvent = (LanguageEventData) event.getData();
- throw new RuntimeException((String) languageEvent.get(LanguageEventData.EXCEPTION));
+ throw new RuntimeException(languageEvent.getException());
}
slang.unSubscribeOnEvents(finishListener);
return event; | ['cloudslang-content-verifier/src/main/java/io/cloudslang/lang/tools/build/tester/TriggerTestCaseEventListener.java', 'cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/TriggerFlows.java', 'cloudslang-content-verifier/src/test/java/io/cloudslang/lang/tools/build/tester/SlangTestRunnerTest.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java', 'cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 332,283 | 64,498 | 8,615 | 104 | 2,003 | 356 | 37 | 2 | 312 | 17 | 75 | 4 | 0 | 0 | 1970-01-01T00:23:55 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,035 | cloudslang/cloud-slang/281/259 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/259 | https://github.com/CloudSlang/cloud-slang/pull/281 | https://github.com/CloudSlang/cloud-slang/pull/281 | 1 | fixes | iterating over empty arrays problem | I ran over this usecase where a loop is supposed to iterate over an empty array and I get an unclear message
Command failed java.lang.RuntimeException: Slang Error : Error running: 'get_parent_image'
```
null
```
As @orius123 we should either show a warning about not being allowed to iterate over empty arrays or just skip the loop
| b1109134b20c17a780a4905ca7d2b9e1555da9ed | 3aa6eed8e31ac101b25d5cc89594c04cb65990d5 | https://github.com/cloudslang/cloud-slang/compare/b1109134b20c17a780a4905ca7d2b9e1555da9ed...3aa6eed8e31ac101b25d5cc89594c04cb65990d5 | diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBinding.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBinding.java
index 504ffaa27..6e8134ef7 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBinding.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBinding.java
@@ -27,24 +27,31 @@ import java.util.List;
@Component
public class AsyncLoopBinding {
+ public static final String ASYNC_LOOP_EXPRESSION_ERROR_MESSAGE = "Error evaluating async loop expression in task";
+
@Autowired
private ScriptEvaluator scriptEvaluator;
+ public static String generateAsyncLoopExpressionMessage(String nodeName, String message) {
+ return ASYNC_LOOP_EXPRESSION_ERROR_MESSAGE + " '" + nodeName + "', error is: \\n" + message;
+ }
+
public List<Serializable> bindAsyncLoopList(AsyncLoopStatement asyncLoopStatement, Context flowContext, String nodeName) {
- Validate.notNull(asyncLoopStatement, "async task statement cannot be null");
+ Validate.notNull(asyncLoopStatement, "async loop statement cannot be null");
Validate.notNull(flowContext, "flow context cannot be null");
Validate.notNull(nodeName, "node name cannot be null");
+ List<Serializable> evalResult;
try {
- @SuppressWarnings("unchecked") List<Serializable> evalResult = (List<Serializable>) scriptEvaluator.evalExpr(
+ evalResult = (List<Serializable>) scriptEvaluator.evalExpr(
asyncLoopStatement.getExpression(),
flowContext.getImmutableViewOfVariables());
- if (CollectionUtils.isEmpty(evalResult)) {
- throw new RuntimeException("Expression cannot be empty");
- }
- return evalResult;
} catch (Throwable t) {
- throw new RuntimeException("Error evaluating async loop expression in task '" + nodeName + "', error is: \\n" + t.getMessage(), t);
+ throw new RuntimeException(generateAsyncLoopExpressionMessage(nodeName, t.getMessage()), t);
+ }
+ if (CollectionUtils.isEmpty(evalResult)) {
+ throw new RuntimeException(generateAsyncLoopExpressionMessage(nodeName, "expression is empty"));
}
+ return evalResult;
}
}
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/LoopsBinding.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/LoopsBinding.java
index 2914640ae..d1c047018 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/LoopsBinding.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/LoopsBinding.java
@@ -34,6 +34,9 @@ import static io.cloudslang.lang.runtime.env.LoopCondition.LOOP_CONDITION_KEY;
@Component
public class LoopsBinding {
+ public static final String FOR_LOOP_EXPRESSION_ERROR_MESSAGE = "Error evaluating for loop expression in task";
+ public static final String INVALID_MAP_EXPRESSION_MESSAGE = "Invalid expression for iterating maps";
+
private final Logger logger = Logger.getLogger(getClass());
@Autowired
@@ -85,7 +88,7 @@ public class LoopsBinding {
try {
evalResult = scriptEvaluator.evalExpr(collectionExpression, variables);
} catch (Throwable t) {
- throw new RuntimeException("Error evaluating for loop expression in task '" + nodeName + "',\\n\\tError is: " + t.getMessage(), t);
+ throw new RuntimeException(FOR_LOOP_EXPRESSION_ERROR_MESSAGE + " '" + nodeName + "',\\n\\tError is: " + t.getMessage(), t);
}
if (forLoopStatement instanceof MapForLoopStatement) {
@@ -97,7 +100,7 @@ public class LoopsBinding {
}
evalResult = (Serializable) entriesAsSerializable;
} else {
- throw new RuntimeException("Invalid expression for iterating maps: " + collectionExpression);
+ throw new RuntimeException(INVALID_MAP_EXPRESSION_MESSAGE + ": " + collectionExpression);
}
}
@@ -107,6 +110,9 @@ public class LoopsBinding {
"in task: '" + nodeName + "' " +
"doesn't return an iterable, other types are not supported");
}
+ if (!forLoopCondition.hasMore()) {
+ throw new RuntimeException(FOR_LOOP_EXPRESSION_ERROR_MESSAGE + " '" + nodeName + "',\\n\\tError is: expression is empty");
+ }
return forLoopCondition;
}
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBindingTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBindingTest.java
index a3a416d75..023f816c3 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBindingTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBindingTest.java
@@ -67,6 +67,21 @@ public class AsyncLoopBindingTest {
assertEquals("returned async loop list not as expected", expectedList, actualList);
}
+ @Test
+ public void testEmptyExpressionThrowsException() throws Exception {
+ Map<String, Serializable> variables = new HashMap<>();
+ variables.put("key1", "value1");
+ variables.put("key2", "value2");
+ Context context = new Context(variables);
+
+ when(scriptEvaluator.evalExpr(eq("expression"), eq(variables))).thenReturn(Lists.newArrayList());
+
+ exception.expectMessage("expression is empty");
+ exception.expect(RuntimeException.class);
+
+ asyncLoopBinding.bindAsyncLoopList(createBasicSyncLoopStatement(), context, "nodeName");
+ }
+
@Test
public void testExceptionIsPropagated() throws Exception {
Map<String, Serializable> variables = new HashMap<>();
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/LoopsBindingTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/LoopsBindingTest.java
index f1fdf3558..854a6aa1f 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/LoopsBindingTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/LoopsBindingTest.java
@@ -14,6 +14,7 @@ import org.mockito.runners.MockitoJUnitRunner;
import io.cloudslang.lang.entities.LoopStatement;
import io.cloudslang.lang.runtime.env.Context;
import io.cloudslang.lang.runtime.env.ForLoopCondition;
+import org.python.google.common.collect.Lists;
import javax.script.ScriptEngine;
import java.io.Serializable;
@@ -49,13 +50,27 @@ public class LoopsBindingTest {
public void whenValueIsNotThereItWillBeCreated() throws Exception {
Context context = mock(Context.class);
when(scriptEvaluator.evalExpr(anyString(), anyMapOf(String.class, Serializable.class)))
- .thenReturn(new ArrayList<>());
+ .thenReturn(Lists.newArrayList(1));
HashMap<String, Serializable> langVars = new HashMap<>();
when(context.getLangVariables()).thenReturn(langVars);
loopsBinding.getOrCreateLoopCondition(createBasicForStatement(), context, "node");
Assert.assertEquals(true, context.getLangVariables().containsKey(LoopCondition.LOOP_CONDITION_KEY));
}
+ @Test
+ public void whenExpressionIsEmptyThrowsException() throws Exception {
+ Context context = mock(Context.class);
+ when(scriptEvaluator.evalExpr(anyString(), anyMapOf(String.class, Serializable.class)))
+ .thenReturn(Lists.newArrayList());
+ HashMap<String, Serializable> langVars = new HashMap<>();
+ when(context.getLangVariables()).thenReturn(langVars);
+
+ exception.expectMessage("expression is empty");
+ exception.expect(RuntimeException.class);
+
+ loopsBinding.getOrCreateLoopCondition(createBasicForStatement(), context, "node");
+ }
+
@Test(expected = RuntimeException.class)
public void passingNullLoopStatementThrowsException() throws Exception {
loopsBinding.getOrCreateLoopCondition(null, mock(Context.class), "aa");
@@ -109,4 +124,5 @@ public class LoopsBindingTest {
verify(context).putVariable("k", "john");
verify(context).putVariable("v", 1);
}
+
}
\\ No newline at end of file | ['cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBindingTest.java', 'cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/bindings/LoopsBindingTest.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/AsyncLoopBinding.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/bindings/LoopsBinding.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 311,206 | 60,174 | 8,083 | 101 | 2,272 | 416 | 31 | 2 | 336 | 56 | 75 | 10 | 0 | 1 | 1970-01-01T00:23:50 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,034 | cloudslang/cloud-slang/310/300 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/300 | https://github.com/CloudSlang/cloud-slang/pull/310 | https://github.com/CloudSlang/cloud-slang/pull/310 | 1 | fixes | wrong path created by slang actions | the path created by slang action is not correct and also doe not represent the language syntax and how does the user expect to.
comment: i already have the fix
| 8ba75c87d81306fd768aa0ed52a922c0acfd1567 | d1000b475377c637b55c88594c452ac15a791c47 | https://github.com/cloudslang/cloud-slang/compare/8ba75c87d81306fd768aa0ed52a922c0acfd1567...d1000b475377c637b55c88594c452ac15a791c47 | diff --git a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java
index 53e65db71..2658e94d7 100644
--- a/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java
+++ b/cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java
@@ -73,9 +73,10 @@ public class SyncTriggerEventListener implements ScoreEventListener{
errorMessage.set(SLANG_STEP_ERROR_MSG + data.get(LanguageEventData.EXCEPTION));
break;
case ScoreLangConstants.EVENT_INPUT_END:
- String taskName = (String)data.get(LanguageEventData.levelName.TASK_NAME.name());
- if(StringUtils.isNotEmpty(taskName)){
- String path = (String) data.get(LanguageEventData.PATH);
+ LanguageEventData eventData = (LanguageEventData) data;
+ if(eventData.getStepType() == LanguageEventData.StepType.TASK){
+ String taskName = eventData.getStepName();
+ String path = eventData.getPath();
int matches = StringUtils.countMatches(path, ExecutionPath.PATH_SEPARATOR);
String prefix = StringUtils.repeat(TASK_PATH_PREFIX, matches);
printWithColor(Ansi.Color.YELLOW, prefix + taskName);
@@ -121,7 +122,7 @@ public class SyncTriggerEventListener implements ScoreEventListener{
private void printFinishEvent(Map<String, Serializable> data) {
String flowResult = (String)data.get(LanguageEventData.RESULT);
- String flowName = (String)data.get(LanguageEventData.levelName.EXECUTABLE_NAME.toString());
+ String flowName = (String)data.get(LanguageEventData.STEP_NAME);
printWithColor(Ansi.Color.CYAN,"Flow : " + flowName + " finished with result : " + flowResult);
}
diff --git a/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/scorecompiler/ExecutionPlanBuilder.java b/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/scorecompiler/ExecutionPlanBuilder.java
index d2ddd2c28..aa5859033 100644
--- a/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/scorecompiler/ExecutionPlanBuilder.java
+++ b/cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/scorecompiler/ExecutionPlanBuilder.java
@@ -1,28 +1,32 @@
/*******************************************************************************
-* (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
-* All rights reserved. This program and the accompanying materials
-* are made available under the terms of the Apache License v2.0 which accompany this distribution.
-*
-* The Apache License is available at
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-*******************************************************************************/
+ * (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License v2.0 which accompany this distribution.
+ *
+ * The Apache License is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *******************************************************************************/
package io.cloudslang.lang.compiler.scorecompiler;
import ch.lambdaj.Lambda;
+import io.cloudslang.lang.compiler.modeller.model.Flow;
+import io.cloudslang.lang.compiler.modeller.model.Operation;
import io.cloudslang.lang.compiler.modeller.model.Task;
import io.cloudslang.lang.entities.ResultNavigation;
import io.cloudslang.lang.entities.bindings.Result;
-import io.cloudslang.lang.compiler.modeller.model.Flow;
-import io.cloudslang.lang.compiler.modeller.model.Operation;
-import org.apache.commons.collections4.CollectionUtils;
import io.cloudslang.score.api.ExecutionPlan;
import io.cloudslang.score.api.ExecutionStep;
+import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import static ch.lambdaj.Lambda.having;
import static ch.lambdaj.Lambda.on;
@@ -106,23 +110,14 @@ public class ExecutionPlanBuilder {
if (isAsync) {
Long joinStepID = currentId + NUMBER_OF_ASYNC_LOOP_EXECUTION_STEPS + 1;
taskExecutionSteps.add(
- stepFactory.createAddBranchesStep(
- currentId++,
- joinStepID,
- currentId,
- task.getPreTaskActionData(),
- compiledFlow.getId(),
- taskName
+ stepFactory.createAddBranchesStep(currentId++, joinStepID, currentId,
+ task.getPreTaskActionData(), compiledFlow.getId(), taskName
)
);
}
taskExecutionSteps.add(
- stepFactory.createBeginTaskStep(
- currentId++,
- task.getInputs(),
- task.getPreTaskActionData(),
- task.getRefId(),
- taskName)
+ stepFactory.createBeginTaskStep(currentId++, task.getInputs(),
+ task.getPreTaskActionData(), task.getRefId(), taskName)
);
//End Task
@@ -131,39 +126,28 @@ public class ExecutionPlanBuilder {
String nextStepName = entry.getValue();
if (taskReferences.get(nextStepName) == null) {
Task nextTaskToCompile = Lambda.selectFirst(tasks, having(on(Task.class).getName(), equalTo(nextStepName)));
- if(nextTaskToCompile == null){
+ if (nextTaskToCompile == null) {
throw new RuntimeException("Failed to compile task: " + taskName + ". The task/result name: " + entry.getValue() + " of navigation: " + entry.getKey() + " -> " + entry.getValue() + " is missing");
}
taskExecutionSteps.addAll(buildTaskExecutionSteps(nextTaskToCompile, taskReferences, tasks, compiledFlow));
}
- long nextStepId = taskReferences.get(nextStepName);
- String presetResult = (FLOW_END_STEP_ID == nextStepId) ? nextStepName : null;
- navigationValues.put(entry.getKey(), new ResultNavigation(nextStepId, presetResult));
+ long nextStepId = taskReferences.get(nextStepName);
+ String presetResult = (FLOW_END_STEP_ID == nextStepId) ? nextStepName : null;
+ navigationValues.put(entry.getKey(), new ResultNavigation(nextStepId, presetResult));
}
if (isAsync) {
taskExecutionSteps.add(
- stepFactory.createFinishTaskStep(
- currentId++,
- task.getPostTaskActionData(),
- new HashMap<String, ResultNavigation>(),
- taskName,
- true)
+ stepFactory.createFinishTaskStep(currentId++, task.getPostTaskActionData(),
+ new HashMap<String, ResultNavigation>(), taskName, true)
);
taskExecutionSteps.add(
- stepFactory.createJoinBranchesStep(
- currentId,
- task.getPostTaskActionData(),
- navigationValues,
- taskName)
+ stepFactory.createJoinBranchesStep(currentId, task.getPostTaskActionData(),
+ navigationValues, taskName)
);
} else {
taskExecutionSteps.add(
- stepFactory.createFinishTaskStep(
- currentId,
- task.getPostTaskActionData(),
- navigationValues,
- taskName,
- false)
+ stepFactory.createFinishTaskStep(currentId, task.getPostTaskActionData(),
+ navigationValues, taskName, false)
);
}
return taskExecutionSteps;
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/env/ExecutionPath.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/env/ExecutionPath.java
index f2be9b704..787d59069 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/env/ExecutionPath.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/env/ExecutionPath.java
@@ -1,70 +1,61 @@
/*******************************************************************************
-* (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
-* All rights reserved. This program and the accompanying materials
-* are made available under the terms of the Apache License v2.0 which accompany this distribution.
-*
-* The Apache License is available at
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-*******************************************************************************/
+ * (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License v2.0 which accompany this distribution.
+ *
+ * The Apache License is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *******************************************************************************/
package io.cloudslang.lang.runtime.env;
+import org.apache.commons.lang3.StringUtils;
+
import java.io.Serializable;
import java.util.ArrayDeque;
import java.util.Deque;
-import java.util.Iterator;
+
+import static org.apache.commons.lang3.StringUtils.join;
/**
* @author moradi
* @since 06/11/2014
- * @version $Id$
*/
public class ExecutionPath implements Serializable {
- public static final String PATH_SEPARATOR = "/";
- private static final long serialVersionUID = 5536588094244112461L;
+ public static final String PATH_SEPARATOR = ".";
- private Deque<Integer> parentPositions;
- private int position;
+ private Deque<Integer> parentPositions;
+ private int position;
- public ExecutionPath() {
- parentPositions = new ArrayDeque<>();
- }
-
- public int forward() {
- return position++;
- }
+ public ExecutionPath() {
+ parentPositions = new ArrayDeque<>();
+ }
- public int down() {
- parentPositions.push(position);
- position = 0;
- return position;
- }
+ public void forward() {
+ position++;
+ }
- public int up() {
- position = parentPositions.pop();
- return position;
- }
+ public void down() {
+ parentPositions.push(position);
+ position = 0;
+ }
- public int getDepth() {
- return parentPositions.size();
- }
+ public void up() {
+ position = parentPositions.pop();
+ }
- public String getCurrentPath() {
+ public String getCurrentPath() {
return getCurrentPath(position);
- }
+ }
- public String getCurrentPathPeekForward() {
- return getCurrentPath(position + 1);
+ public String getParentPath() {
+ return join(parentPositions.descendingIterator(), PATH_SEPARATOR);
}
private String getCurrentPath(int position) {
- StringBuilder result = new StringBuilder();
- for(Iterator<Integer> iterator = parentPositions.descendingIterator(); iterator.hasNext();) {
- result.append(iterator.next()).append(PATH_SEPARATOR);
- }
- result.append(position);
- return result.toString();
+ String parents = getParentPath();
+ return StringUtils.isEmpty(parents) ? position + "" : parents + PATH_SEPARATOR + position;
}
}
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
index 0a2ede4a0..2e3259526 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java
@@ -1,12 +1,12 @@
/*******************************************************************************
-* (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
-* All rights reserved. This program and the accompanying materials
-* are made available under the terms of the Apache License v2.0 which accompany this distribution.
-*
-* The Apache License is available at
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-*******************************************************************************/
+ * (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License v2.0 which accompany this distribution.
+ *
+ * The Apache License is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *******************************************************************************/
package io.cloudslang.lang.runtime.events;
import java.io.Serializable;
@@ -17,95 +17,114 @@ import java.util.Map;
/**
* @author moradi
- * @since 03/11/2014
* @version $Id$
+ * @since 03/11/2014
*/
public class LanguageEventData extends HashMap<String, Serializable> {
- public static final String TYPE = "TYPE";
- public static final String DESCRIPTION = "DESCRIPTION";
- public static final String TIMESTAMP = "TIMESTAMP";
- public static final String EXECUTIONID = "EXECUTIONID";
- public static final String PATH = "PATH";
- public static final String EXCEPTION = "EXCEPTION";
- public static final String OUTPUTS = "OUTPUTS";
+ public static final String TYPE = "TYPE";
+ public static final String DESCRIPTION = "DESCRIPTION";
+ public static final String TIMESTAMP = "TIMESTAMP";
+ public static final String EXECUTION_ID = "EXECUTION_ID";
+ public static final String PATH = "PATH";
+ public static final String EXCEPTION = "EXCEPTION";
+ public static final String OUTPUTS = "OUTPUTS";
public static final String RESULT = "RESULT";
- public static final String CALL_ARGUMENTS = "CALL_ARGUMENTS";
- public static final String BOUND_INPUTS = "BOUND_INPUTS";
+ public static final String CALL_ARGUMENTS = "CALL_ARGUMENTS";
+ public static final String BOUND_INPUTS = "BOUND_INPUTS";
public static final String BOUND_ASYNC_LOOP_EXPRESSION = "BOUND_ASYNC_LOOP_EXPRESSION";
- public static final String RETURN_VALUES = "RETURN_VALUES";
+ public static final String RETURN_VALUES = "RETURN_VALUES";
public static final String NEXT_STEP_POSITION = "nextPosition";
- public static final String ENCRYPTED_VALUE = "*****";
- private static final long serialVersionUID = 2885051907156304718L;
+ public static final String ENCRYPTED_VALUE = "*****";
+ public static final String STEP_TYPE = "STEP_TYPE";
+ public static final String STEP_NAME = "STEP_NAME";
+
+ public enum StepType {
+ TASK,
+ EXECUTABLE,
+ ACTION,
+ NAVIGATION
+ }
- public enum levelName {
- TASK_NAME,
- EXECUTABLE_NAME
- }
+ public String getStepName() {
+ return (String) get(STEP_NAME);
+ }
- public String getEventType() {
- return (String)get(TYPE);
- }
+ public void setStepName(String stepName){
+ put(STEP_NAME, stepName);
+ }
- public void setEventType(String eventType) {
- put(TYPE, eventType);
- }
+ public StepType getStepType() {
+ return (StepType) get(STEP_TYPE);
+ }
- public String getDescription() {
- return (String)get(DESCRIPTION);
- }
+ public void setStepType(StepType stepType){
+ put(STEP_TYPE, stepType);
+ }
- public void setDescription(String description) {
- put(DESCRIPTION, description);
- }
+ public String getEventType() {
+ return (String) get(TYPE);
+ }
- public Date getTimeStamp() {
- return (Date)get(TIMESTAMP);
- }
+ public void setEventType(String eventType) {
+ put(TYPE, eventType);
+ }
- public void setTimeStamp(Date timeStamp) {
- put(TIMESTAMP, timeStamp);
- }
+ public String getDescription() {
+ return (String) get(DESCRIPTION);
+ }
- public Long getExecutionId() {
- return (Long)get(EXECUTIONID);
- }
+ public void setDescription(String description) {
+ put(DESCRIPTION, description);
+ }
- public void setExecutionId(Long executionId) {
- put(EXECUTIONID, executionId);
- }
+ public Date getTimeStamp() {
+ return (Date) get(TIMESTAMP);
+ }
- public String getPath() {
- return (String)get(PATH);
- }
+ public void setTimeStamp(Date timeStamp) {
+ put(TIMESTAMP, timeStamp);
+ }
- public void setPath(String path) {
- put(PATH, path);
- }
+ public Long getExecutionId() {
+ return (Long) get(EXECUTION_ID);
+ }
- public Exception getException() {
- return (Exception)get(EXCEPTION);
- }
+ public void setExecutionId(Long executionId) {
+ put(EXECUTION_ID, executionId);
+ }
- public void setException(Exception ex) {
- put(EXCEPTION, ex);
- }
+ public String getPath() {
+ return (String) get(PATH);
+ }
- public Map<String, Serializable> getInputs() {
- return (Map<String, Serializable>)get(BOUND_INPUTS);
- }
+ public void setPath(String path) {
+ put(PATH, path);
+ }
- public void setInputs(Map<String, Serializable> inputs) {
- put(BOUND_INPUTS, (Serializable)inputs);
- }
+ public Exception getException() {
+ return (Exception) get(EXCEPTION);
+ }
- public Map<String, Serializable> getOutputs() {
- return (Map<String, Serializable>)get(OUTPUTS);
- }
+ public void setException(Exception ex) {
+ put(EXCEPTION, ex);
+ }
- public void setOutputs(Map<String, Serializable> outputs) {
- put(OUTPUTS, (Serializable)outputs);
- }
+ public Map<String, Serializable> getInputs() {
+ return (Map<String, Serializable>) get(BOUND_INPUTS);
+ }
+
+ public void setInputs(Map<String, Serializable> inputs) {
+ put(BOUND_INPUTS, (Serializable) inputs);
+ }
+
+ public Map<String, Serializable> getOutputs() {
+ return (Map<String, Serializable>) get(OUTPUTS);
+ }
+
+ public void setOutputs(Map<String, Serializable> outputs) {
+ put(OUTPUTS, (Serializable) outputs);
+ }
public List<Serializable> getAsyncLoopBoundExpression() {
return (List<Serializable>) get(BOUND_ASYNC_LOOP_EXPRESSION);
@@ -114,5 +133,4 @@ public class LanguageEventData extends HashMap<String, Serializable> {
public void setAsyncLoopBoundExpression(List<Serializable> asyncLoopBoundExpression) {
put(BOUND_ASYNC_LOOP_EXPRESSION, (Serializable) asyncLoopBoundExpression);
}
-
}
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/navigations/Navigations.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/navigations/Navigations.java
index 3c06e98c1..d69cc3945 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/navigations/Navigations.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/navigations/Navigations.java
@@ -39,7 +39,8 @@ public class Navigations {
// If we have an error key stored, we fire an error event and return null as the next position
if(executionRuntimeServices.hasStepErrorKey()) {
- AbstractSteps.fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.SLANG_EXECUTION_EXCEPTION, "Error detected during step",
+ AbstractSteps.fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.SLANG_EXECUTION_EXCEPTION,
+ "Error detected during step", LanguageEventData.StepType.NAVIGATION, null,
Pair.of(LanguageEventData.EXCEPTION, executionRuntimeServices.getStepErrorKey()));
throw new RuntimeException(executionRuntimeServices.getStepErrorKey());
}
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AbstractSteps.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AbstractSteps.java
index 78190b1c5..84fdb1238 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AbstractSteps.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AbstractSteps.java
@@ -25,9 +25,13 @@ import java.util.Map.Entry;
public abstract class AbstractSteps {
- public void sendBindingInputsEvent(List<Input> inputs, final Map<String, Serializable> context, RunEnvironment runEnv,
- ExecutionRuntimeServices executionRuntimeServices, String desc, String nodeName,
- LanguageEventData.levelName levelName) {
+ public void sendBindingInputsEvent(List<Input> inputs,
+ final Map<String, Serializable> context,
+ RunEnvironment runEnv,
+ ExecutionRuntimeServices executionRuntimeServices,
+ String desc,
+ LanguageEventData.StepType stepType,
+ String stepName) {
Map<String, Serializable> inputsForEvent = new HashMap<>();
for (Input input : inputs) {
String inputName = input.getName();
@@ -38,8 +42,8 @@ public abstract class AbstractSteps {
inputsForEvent.put(inputName, inputValue);
}
}
- fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_INPUT_END, desc, Pair.of(LanguageEventData.BOUND_INPUTS,
- (Serializable) inputsForEvent), Pair.of(levelName.name(), nodeName));
+ fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_INPUT_END, desc, stepType, stepName,
+ Pair.of(LanguageEventData.BOUND_INPUTS, (Serializable) inputsForEvent));
}
@SafeVarargs
@@ -47,14 +51,11 @@ public abstract class AbstractSteps {
RunEnvironment runEnvironment,
String type,
String description,
+ LanguageEventData.StepType stepType,
+ String stepName,
Map.Entry<String, ? extends Serializable>... fields) {
- fireEvent(
- runtimeServices,
- type,
- description,
- runEnvironment.getExecutionPath().getCurrentPath(),
- fields
- );
+ fireEvent(runtimeServices, type, description,
+ runEnvironment.getExecutionPath().getCurrentPath(), stepType, stepName, fields);
}
@SafeVarargs
@@ -62,8 +63,12 @@ public abstract class AbstractSteps {
String type,
String description,
String path,
+ LanguageEventData.StepType stepType,
+ String stepName,
Map.Entry<String, ? extends Serializable>... fields) {
LanguageEventData eventData = new LanguageEventData();
+ eventData.setStepType(stepType);
+ eventData.setStepName(stepName);
eventData.setEventType(type);
eventData.setDescription(description);
eventData.setTimeStamp(new Date());
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
index 0bf4df3d2..cdad7ad78 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java
@@ -68,7 +68,9 @@ public class ActionSteps extends AbstractSteps {
Map<String, Serializable> returnValue = new HashMap<>();
Map<String, Serializable> callArguments = runEnv.removeCallArguments();
Map<String, SerializableSessionObject> serializableSessionData = runEnv.getSerializableDataMap();
- fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_ACTION_START, "Preparing to run action " + actionType, Pair.of(LanguageEventData.CALL_ARGUMENTS, (Serializable) callArguments));
+ fireEvent(executionRuntimeServices, ScoreLangConstants.EVENT_ACTION_START, "Preparing to run action " + actionType,
+ runEnv.getExecutionPath().getParentPath(), LanguageEventData.StepType.ACTION, null,
+ Pair.of(LanguageEventData.CALL_ARGUMENTS, (Serializable) callArguments));
try {
switch (actionType) {
case JAVA:
@@ -81,7 +83,9 @@ public class ActionSteps extends AbstractSteps {
break;
}
} catch (RuntimeException ex) {
- fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_ACTION_ERROR, ex.getMessage(), Pair.of(LanguageEventData.EXCEPTION, ex.getMessage()));
+ fireEvent(executionRuntimeServices, ScoreLangConstants.EVENT_ACTION_ERROR, ex.getMessage(),
+ runEnv.getExecutionPath().getParentPath(), LanguageEventData.StepType.ACTION, null,
+ Pair.of(LanguageEventData.EXCEPTION, ex.getMessage()));
logger.error(ex);
throw(ex);
}
@@ -90,7 +94,9 @@ public class ActionSteps extends AbstractSteps {
ReturnValues returnValues = new ReturnValues(returnValue, null);
runEnv.putReturnValues(returnValues);
- fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_ACTION_END, "Action performed", Pair.of(LanguageEventData.RETURN_VALUES, (Serializable)returnValue));
+ fireEvent(executionRuntimeServices, ScoreLangConstants.EVENT_ACTION_END, "Action performed",
+ runEnv.getExecutionPath().getParentPath(), LanguageEventData.StepType.ACTION, null,
+ Pair.of(LanguageEventData.RETURN_VALUES, (Serializable)returnValue));
runEnv.putNextStepPosition(nextStepId);
}
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java
index de9499b79..c93a8782c 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java
@@ -16,7 +16,6 @@ import io.cloudslang.lang.entities.ScoreLangConstants;
import io.cloudslang.lang.entities.bindings.Output;
import io.cloudslang.lang.runtime.RuntimeConstants;
import io.cloudslang.lang.runtime.bindings.AsyncLoopBinding;
-import io.cloudslang.lang.runtime.bindings.LoopsBinding;
import io.cloudslang.lang.runtime.bindings.OutputsBinding;
import io.cloudslang.lang.runtime.env.*;
import io.cloudslang.lang.runtime.events.LanguageEventData;
@@ -48,9 +47,6 @@ public class AsyncLoopSteps extends AbstractSteps {
@Autowired
private AsyncLoopBinding asyncLoopBinding;
- @Autowired
- private LoopsBinding loopsBinding;
-
@Autowired
private OutputsBinding outputsBinding;
@@ -70,22 +66,21 @@ public class AsyncLoopSteps extends AbstractSteps {
List<Serializable> splitData = asyncLoopBinding.bindAsyncLoopList(asyncLoopStatement, flowContext, nodeName);
- fireEvent(
- executionRuntimeServices,
- ScoreLangConstants.EVENT_ASYNC_LOOP_EXPRESSION_END,
- "async loop expression bound",
- runEnv.getExecutionPath().getCurrentPathPeekForward(),
- Pair.of(LanguageEventData.BOUND_ASYNC_LOOP_EXPRESSION, (Serializable) splitData),
- Pair.of(LanguageEventData.levelName.TASK_NAME.name(), nodeName));
+ fireEvent(executionRuntimeServices, ScoreLangConstants.EVENT_ASYNC_LOOP_EXPRESSION_END,
+ "async loop expression bound", runEnv.getExecutionPath().getCurrentPath(),
+ LanguageEventData.StepType.TASK, nodeName,
+ Pair.of(LanguageEventData.BOUND_ASYNC_LOOP_EXPRESSION, (Serializable) splitData));
runEnv.putNextStepPosition(nextStepId);
+ runEnv.getExecutionPath().down();
for (Serializable splitItem : splitData) {
RunEnvironment branchRuntimeEnvironment = (RunEnvironment) SerializationUtils.clone(runEnv);
Context branchContext = (Context) SerializationUtils.clone(flowContext);
branchContext.putVariable(asyncLoopStatement.getVarName(), splitItem);
- updateCallArgumentsAndPushContextToStack(branchRuntimeEnvironment, branchContext, new HashMap<String, Serializable>());
+ updateCallArgumentsAndPushContextToStack(branchRuntimeEnvironment,
+ branchContext, new HashMap<String, Serializable>());
createBranch(
branchRuntimeEnvironment,
@@ -95,20 +90,15 @@ public class AsyncLoopSteps extends AbstractSteps {
nextStepId,
branchBeginStep);
- fireEvent(
- executionRuntimeServices,
- ScoreLangConstants.EVENT_BRANCH_START,
- "async loop branch created",
- runEnv.getExecutionPath().getCurrentPathPeekForward(),
- Pair.of(ScoreLangConstants.REF_ID, refId),
- Pair.of(RuntimeConstants.SPLIT_ITEM_KEY, splitItem),
- Pair.of(LanguageEventData.levelName.TASK_NAME.name(), nodeName));
+ fireEvent(executionRuntimeServices, ScoreLangConstants.EVENT_BRANCH_START,
+ "async loop branch created", runEnv.getExecutionPath().getCurrentPath(),
+ LanguageEventData.StepType.TASK, nodeName, Pair.of(ScoreLangConstants.REF_ID, refId),
+ Pair.of(RuntimeConstants.SPLIT_ITEM_KEY, splitItem));
+
+ runEnv.getExecutionPath().forward();
}
updateCallArgumentsAndPushContextToStack(runEnv, flowContext, new HashMap<String, Serializable>());
-
- // forward after the branches are created because begin task method also calls forward
- runEnv.getExecutionPath().forward();
} catch (RuntimeException e) {
logger.error("There was an error running the add branches execution step of: \\'" + nodeName + "\\'. Error is: " + e.getMessage());
throw new RuntimeException("Error running: " + nodeName + ": " + e.getMessage(), e);
@@ -122,6 +112,7 @@ public class AsyncLoopSteps extends AbstractSteps {
@Param(ScoreLangConstants.TASK_NAVIGATION_KEY) Map<String, ResultNavigation> taskNavigationValues,
@Param(ScoreLangConstants.NODE_NAME_KEY) String nodeName) {
try {
+ runEnv.getExecutionPath().up();
List<Map<String, Serializable>> branchesContext = Lists.newArrayList();
Context flowContext = runEnv.getStack().popContext();
Map<String, Serializable> contextBeforeSplit = flowContext.getImmutableViewOfVariables();
@@ -146,6 +137,7 @@ public class AsyncLoopSteps extends AbstractSteps {
handleNavigationAndReturnValues(runEnv, executionRuntimeServices, taskNavigationValues, nodeName, publishValues, asyncLoopResult);
runEnv.getStack().pushContext(flowContext);
+ runEnv.getExecutionPath().forward();
} catch (RuntimeException e) {
logger.error("There was an error running the end task execution step of: \\'" + nodeName + "\\'. Error is: " + e.getMessage());
throw new RuntimeException("Error running: \\'" + nodeName + "\\': " + e.getMessage(), e);
@@ -172,15 +164,11 @@ public class AsyncLoopSteps extends AbstractSteps {
HashMap<String, Serializable> outputs = new HashMap<>(publishValues);
ReturnValues returnValues = new ReturnValues(outputs, presetResult != null ? presetResult : asyncLoopResult);
- fireEvent(
- executionRuntimeServices,
- runEnv,
- ScoreLangConstants.EVENT_ASYNC_LOOP_OUTPUT_END,
- "Async loop output binding finished",
+ fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_ASYNC_LOOP_OUTPUT_END,
+ "Async loop output binding finished", LanguageEventData.StepType.TASK, nodeName,
Pair.of(LanguageEventData.OUTPUTS, (Serializable) publishValues),
Pair.of(LanguageEventData.RESULT, returnValues.getResult()),
- Pair.of(LanguageEventData.NEXT_STEP_POSITION, nextStepPosition),
- Pair.of(LanguageEventData.levelName.TASK_NAME.name(), nodeName));
+ Pair.of(LanguageEventData.NEXT_STEP_POSITION, nextStepPosition));
runEnv.putReturnValues(returnValues);
runEnv.putNextStepPosition(nextStepPosition);
@@ -213,10 +201,9 @@ public class AsyncLoopSteps extends AbstractSteps {
executionRuntimeServices,
runEnv,
ScoreLangConstants.EVENT_ASYNC_LOOP_OUTPUT_START,
- "Async loop output binding started",
+ "Async loop output binding started", LanguageEventData.StepType.TASK, nodeName,
Pair.of(ScoreLangConstants.TASK_AGGREGATE_KEY, (Serializable) taskAggregateValues),
- Pair.of(ScoreLangConstants.TASK_NAVIGATION_KEY, taskNavigationValues),
- Pair.of(LanguageEventData.levelName.TASK_NAME.name(), nodeName));
+ Pair.of(ScoreLangConstants.TASK_NAVIGATION_KEY, taskNavigationValues));
return outputsBinding.bindOutputs(contextBeforeSplit, aggregateContext, taskAggregateValues);
}
@@ -239,13 +226,9 @@ public class AsyncLoopSteps extends AbstractSteps {
ReturnValues executableReturnValues = branchRuntimeEnvironment.removeReturnValues();
branchesResult.add(executableReturnValues.getResult());
- fireEvent(
- executionRuntimeServices,
- runEnv,
- ScoreLangConstants.EVENT_BRANCH_END,
- "async loop branch ended",
- Pair.of(RuntimeConstants.BRANCH_RETURN_VALUES_KEY, executableReturnValues),
- Pair.of(LanguageEventData.levelName.TASK_NAME.name(), nodeName)
+ fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_BRANCH_END,
+ "async loop branch ended", LanguageEventData.StepType.TASK, nodeName,
+ Pair.of(RuntimeConstants.BRANCH_RETURN_VALUES_KEY, executableReturnValues)
);
}
}
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ExecutableSteps.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ExecutableSteps.java
index de570b2c6..721f8f87e 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ExecutableSteps.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ExecutableSteps.java
@@ -43,6 +43,8 @@ import static io.cloudslang.score.api.execution.ExecutionParametersConsts.EXECUT
@Component
public class ExecutableSteps extends AbstractSteps {
+ public static final String ACTION_RETURN_VALUES_KEY = "actionReturnValues";
+
@Autowired
private ResultsBinding resultsBinding;
@@ -61,7 +63,6 @@ public class ExecutableSteps extends AbstractSteps {
@Param(ScoreLangConstants.NODE_NAME_KEY) String nodeName,
@Param(ScoreLangConstants.NEXT_STEP_ID_KEY) Long nextStepId) {
try {
-// runEnv.getExecutionPath().forward(); // Start with 1 for consistency
Map<String, Serializable> callArguments = runEnv.removeCallArguments();
if (userInputs != null) {
@@ -84,11 +85,11 @@ public class ExecutableSteps extends AbstractSteps {
updateCallArgumentsAndPushContextToStack(runEnv, new Context(executableContext), actionArguments);
sendBindingInputsEvent(executableInputs, executableContext, runEnv, executionRuntimeServices,
- "Post Input binding for operation/flow", nodeName, LanguageEventData.levelName.EXECUTABLE_NAME);
+ "Post Input binding for operation/flow", LanguageEventData.StepType.EXECUTABLE, nodeName);
// put the next step position for the navigation
runEnv.putNextStepPosition(nextStepId);
- runEnv.getExecutionPath().down();
+ runEnv.getExecutionPath().down();
} catch (RuntimeException e){
logger.error("There was an error running the start executable execution step of: \\'" + nodeName + "\\'.\\n\\tError is: " + e.getMessage());
throw new RuntimeException("Error running: \\'" + nodeName + "\\'.\\n\\t " + e.getMessage(), e);
@@ -109,14 +110,15 @@ public class ExecutableSteps extends AbstractSteps {
@Param(EXECUTION_RUNTIME_SERVICES) ExecutionRuntimeServices executionRuntimeServices,
@Param(ScoreLangConstants.NODE_NAME_KEY) String nodeName) {
try {
- runEnv.getExecutionPath().up();
+ runEnv.getExecutionPath().up();
Context operationContext = runEnv.getStack().popContext();
Map<String, Serializable> operationVariables = operationContext == null ? null : operationContext.getImmutableViewOfVariables();
ReturnValues actionReturnValues = runEnv.removeReturnValues();
fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_OUTPUT_START, "Output binding started",
+ LanguageEventData.StepType.EXECUTABLE, nodeName,
Pair.of(ScoreLangConstants.EXECUTABLE_OUTPUTS_KEY, (Serializable) executableOutputs),
Pair.of(ScoreLangConstants.EXECUTABLE_RESULTS_KEY, (Serializable) executableResults),
- Pair.of("actionReturnValues", actionReturnValues), Pair.of(LanguageEventData.levelName.EXECUTABLE_NAME.toString(), nodeName));
+ Pair.of(ACTION_RETURN_VALUES_KEY, actionReturnValues));
// Resolving the result of the operation/flow
String result = resultsBinding.resolveResult(operationVariables, actionReturnValues.getOutputs(), executableResults, actionReturnValues.getResult());
@@ -128,19 +130,19 @@ public class ExecutableSteps extends AbstractSteps {
ReturnValues returnValues = new ReturnValues(operationReturnOutputs, result);
runEnv.putReturnValues(returnValues);
fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_OUTPUT_END, "Output binding finished",
+ LanguageEventData.StepType.EXECUTABLE, nodeName,
Pair.of(LanguageEventData.OUTPUTS, (Serializable) operationReturnOutputs),
- Pair.of(LanguageEventData.RESULT, returnValues.getResult()),
- Pair.of(LanguageEventData.levelName.EXECUTABLE_NAME.toString(), nodeName));
+ Pair.of(LanguageEventData.RESULT, returnValues.getResult()));
// If we have parent flow data on the stack, we pop it and request the score engine to switch to the parent
// execution plan id once it can, and we set the next position that was stored there for the use of the navigation
if (!runEnv.getParentFlowStack().isEmpty()) {
handleNavigationToParent(runEnv, executionRuntimeServices);
} else {
- fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_EXECUTION_FINISHED, "Execution finished running",
+ fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_EXECUTION_FINISHED,
+ "Execution finished running", LanguageEventData.StepType.EXECUTABLE, nodeName,
Pair.of(LanguageEventData.RESULT, returnValues.getResult()),
- Pair.of(LanguageEventData.OUTPUTS, (Serializable) operationReturnOutputs),
- Pair.of(LanguageEventData.levelName.EXECUTABLE_NAME.toString(), nodeName));
+ Pair.of(LanguageEventData.OUTPUTS, (Serializable) operationReturnOutputs));
}
} catch (RuntimeException e){
logger.error("There was an error running the finish executable execution step of: \\'" + nodeName + "\\'.\\n\\tError is: " + e.getMessage());
diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/TaskSteps.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/TaskSteps.java
index a67888d04..9d6ab16c3 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/TaskSteps.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/TaskSteps.java
@@ -65,7 +65,6 @@ public class TaskSteps extends AbstractSteps {
@Param(ScoreLangConstants.NEXT_STEP_ID_KEY) Long nextStepId,
@Param(ScoreLangConstants.REF_ID) String refId) {
try {
- runEnv.getExecutionPath().forward();
runEnv.removeCallArguments();
runEnv.removeReturnValues();
@@ -103,7 +102,7 @@ public class TaskSteps extends AbstractSteps {
//todo: hook
sendBindingInputsEvent(taskInputs, operationArguments, runEnv, executionRuntimeServices, "Task inputs resolved",
- nodeName, LanguageEventData.levelName.TASK_NAME);
+ LanguageEventData.StepType.TASK, nodeName);
updateCallArgumentsAndPushContextToStack(runEnv, flowContext, operationArguments);
@@ -112,7 +111,6 @@ public class TaskSteps extends AbstractSteps {
// set the start step of the given ref as the next step to execute (in the new running execution plan that will be set)
runEnv.putNextStepPosition(executionRuntimeServices.getSubFlowBeginStep(refId));
- runEnv.getExecutionPath().down();
} catch (RuntimeException e) {
logger.error("There was an error running the begin task execution step of: \\'" + nodeName + "\\'. Error is: " + e.getMessage());
throw new RuntimeException("Error running: " + nodeName + ": " + e.getMessage(), e);
@@ -133,15 +131,15 @@ public class TaskSteps extends AbstractSteps {
@Param(ScoreLangConstants.ASYNC_LOOP_KEY) boolean async_loop) {
try {
- if (runEnv.getExecutionPath().getDepth() > 0) runEnv.getExecutionPath().up();
Context flowContext = runEnv.getStack().popContext();
Map<String, Serializable> flowVariables = flowContext.getImmutableViewOfVariables();
ReturnValues executableReturnValues = runEnv.removeReturnValues();
fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_OUTPUT_START, "Output binding started",
+ LanguageEventData.StepType.TASK, nodeName,
Pair.of(ScoreLangConstants.TASK_PUBLISH_KEY, (Serializable) taskPublishValues),
Pair.of(ScoreLangConstants.TASK_NAVIGATION_KEY, (Serializable) taskNavigationValues),
- Pair.of("operationReturnValues", executableReturnValues), Pair.of(LanguageEventData.levelName.TASK_NAME.name(), nodeName));
+ Pair.of("operationReturnValues", executableReturnValues));
Map<String, Serializable> publishValues = outputsBinding.bindOutputs(flowVariables, executableReturnValues.getOutputs(), taskPublishValues);
@@ -154,6 +152,7 @@ public class TaskSteps extends AbstractSteps {
if (!shouldBreakLoop(breakOn, executableReturnValues) && loopCondition.hasMore()) {
runEnv.putNextStepPosition(previousStepId);
runEnv.getStack().pushContext(flowContext);
+ runEnv.getExecutionPath().forward();
return;
} else {
flowContext.getLangVariables().remove(LoopCondition.LOOP_CONDITION_KEY);
@@ -188,12 +187,13 @@ public class TaskSteps extends AbstractSteps {
ReturnValues returnValues = new ReturnValues(outputs, presetResult != null ? presetResult : executableResult);
runEnv.putReturnValues(returnValues);
fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_OUTPUT_END, "Output binding finished",
+ LanguageEventData.StepType.TASK, nodeName,
Pair.of(LanguageEventData.OUTPUTS, (Serializable) publishValues),
Pair.of(LanguageEventData.RESULT, returnValues.getResult()),
- Pair.of(LanguageEventData.NEXT_STEP_POSITION, nextPosition),
- Pair.of(LanguageEventData.levelName.TASK_NAME.name(), nodeName));
+ Pair.of(LanguageEventData.NEXT_STEP_POSITION, nextPosition));
runEnv.getStack().pushContext(flowContext);
+ runEnv.getExecutionPath().forward();
} catch (RuntimeException e) {
logger.error("There was an error running the end task execution step of: \\'" + nodeName + "\\'. Error is: " + e.getMessage());
throw new RuntimeException("Error running: \\'" + nodeName + "\\': " + e.getMessage(), e);
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java
index d99eeba82..864b3451a 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java
@@ -15,9 +15,10 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
-import org.python.google.common.collect.Lists;
import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -86,7 +87,7 @@ public class LanguageEventDataTest {
Long exeId = 123L;
eventData.setExecutionId(exeId);
assertEquals(exeId, eventData.getExecutionId());
- assertEquals(exeId, eventData.get(LanguageEventData.EXECUTIONID));
+ assertEquals(exeId, eventData.get(LanguageEventData.EXECUTION_ID));
}
/**
@@ -149,15 +150,17 @@ public class LanguageEventDataTest {
assertEquals(outputs, eventData.get(LanguageEventData.OUTPUTS));
}
+
/**
* Test method for {@link LanguageEventData#getAsyncLoopBoundExpression()}.
*/
@Test
public void testAsyncLoopBoundExpression() {
- List<Serializable> asyncLoopBoundExpression = Lists.newArrayList((Serializable) "a", "b", "c");
+ List<Serializable> asyncLoopBoundExpression = new ArrayList<Serializable>(Arrays.asList("a", "b", "c"));
eventData.setAsyncLoopBoundExpression(asyncLoopBoundExpression);
assertEquals(asyncLoopBoundExpression, eventData.getAsyncLoopBoundExpression());
assertEquals(asyncLoopBoundExpression, eventData.get(LanguageEventData.BOUND_ASYNC_LOOP_EXPRESSION));
}
+
}
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java
index 672348c5b..0bd744ecc 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java
@@ -174,6 +174,7 @@ public class AsyncLoopStepsTest {
public void testJoinBranchesAggregateContexts() throws Exception {
// prepare arguments
RunEnvironment runEnvironment = new RunEnvironment();
+ runEnvironment.getExecutionPath().down();
Map<String, Serializable> variables = new HashMap<>();
Context context = new Context(variables);
runEnvironment.getStack().pushContext(context);
@@ -230,6 +231,7 @@ public class AsyncLoopStepsTest {
public void testJoinBranchesNavigationAllBranchesSucced() throws Exception {
// prepare arguments
RunEnvironment runEnvironment = new RunEnvironment();
+ runEnvironment.getExecutionPath().down();
Map<String, Serializable> variables = new HashMap<>();
Context context = new Context(variables);
runEnvironment.getStack().pushContext(context);
@@ -279,6 +281,7 @@ public class AsyncLoopStepsTest {
public void testJoinBranchesNavigationOneBranchFails() throws Exception {
// prepare arguments
RunEnvironment runEnvironment = new RunEnvironment();
+ runEnvironment.getExecutionPath().down();
Map<String, Serializable> variables = new HashMap<>();
Context context = new Context(variables);
runEnvironment.getStack().pushContext(context);
@@ -334,6 +337,7 @@ public class AsyncLoopStepsTest {
public void testJoinBranchesEventsAreFired() throws Exception {
// prepare arguments
RunEnvironment runEnvironment = new RunEnvironment();
+ runEnvironment.getExecutionPath().down();
Map<String, Serializable> variables = new HashMap<>();
Context context = new Context(variables);
runEnvironment.getStack().pushContext(context);
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ExecutableStepsTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ExecutableStepsTest.java
index da0228abd..365124ad8 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ExecutableStepsTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ExecutableStepsTest.java
@@ -112,14 +112,15 @@ public class ExecutableStepsTest {
}
}
Assert.assertNotNull(boundInputEvent);
- Map<String,Serializable> eventData = (Map<String,Serializable>)boundInputEvent.getData();
+ LanguageEventData eventData = (LanguageEventData)boundInputEvent.getData();
Assert.assertTrue(eventData.containsKey(LanguageEventData.BOUND_INPUTS));
Map<String,Serializable> inputsBounded = (Map<String,Serializable>)eventData.get(LanguageEventData.BOUND_INPUTS);
Assert.assertEquals(5, inputsBounded.get("input1"));
Assert.assertEquals(LanguageEventData.ENCRYPTED_VALUE,inputsBounded.get("input2"));
- Assert.assertTrue(eventData.containsKey(LanguageEventData.levelName.EXECUTABLE_NAME.name()));
- Assert.assertEquals("dockerizeStep",eventData.get(LanguageEventData.levelName.EXECUTABLE_NAME.name()));
+ Assert.assertNotNull(eventData.getStepName());
+ Assert.assertEquals(LanguageEventData.StepType.EXECUTABLE, eventData.getStepType());
+ Assert.assertEquals("dockerizeStep", eventData.getStepName());
}
@Test
@@ -225,7 +226,7 @@ public class ExecutableStepsTest {
}
}
Assert.assertNotNull(startOutputEvent);
- Map<String,Serializable> eventData = (Map<String,Serializable>)startOutputEvent.getData();
+ LanguageEventData eventData = (LanguageEventData)startOutputEvent.getData();
Assert.assertTrue(eventData.containsKey(ScoreLangConstants.EXECUTABLE_OUTPUTS_KEY));
Assert.assertTrue(eventData.containsKey(ScoreLangConstants.EXECUTABLE_RESULTS_KEY));
List<Output> outputs= (List<Output>)eventData.get(ScoreLangConstants.EXECUTABLE_OUTPUTS_KEY);
@@ -234,17 +235,18 @@ public class ExecutableStepsTest {
Assert.assertEquals(possibleResults, results);
Assert.assertNotNull(boundOutputEvent);
- eventData = (Map<String,Serializable>)boundOutputEvent.getData();
+ eventData = (LanguageEventData)boundOutputEvent.getData();
Assert.assertTrue(eventData.containsKey(LanguageEventData.OUTPUTS));
- Map<String, String> returnOutputs= (Map<String, String>)eventData.get(LanguageEventData.OUTPUTS);
+ Map<String, Serializable> returnOutputs= eventData.getOutputs();
String returnResult= (String)eventData.get(LanguageEventData.RESULT);
- Assert.assertEquals("task1",eventData.get(LanguageEventData.levelName.EXECUTABLE_NAME.name()));
+ Assert.assertEquals("task1",eventData.getStepName());
+ Assert.assertEquals(LanguageEventData.StepType.EXECUTABLE, eventData.getStepType());
Assert.assertEquals(1, returnOutputs.size());
Assert.assertEquals("John", returnOutputs.get("name"));
Assert.assertTrue(returnResult.equals(ScoreLangConstants.SUCCESS_RESULT));
Assert.assertNotNull(executableFinishedEvent);
- eventData = (Map<String,Serializable>)executableFinishedEvent.getData();
+ eventData = (LanguageEventData)executableFinishedEvent.getData();
String result = (String)eventData.get(LanguageEventData.RESULT);
Map<String, String> eventOutputs = (Map<String, String>)eventData.get(LanguageEventData.OUTPUTS);
Assert.assertEquals(ScoreLangConstants.SUCCESS_RESULT, result);
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/TaskStepsTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/TaskStepsTest.java
index 7c56aa9db..d4b7822ba 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/TaskStepsTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/TaskStepsTest.java
@@ -143,8 +143,9 @@ public class TaskStepsTest {
ScoreEvent inputEvent = events.iterator().next();
Assert.assertEquals(ScoreLangConstants.EVENT_INPUT_END,inputEvent.getEventType());
- Map<String,Serializable> eventData = (Map<String,Serializable>)inputEvent.getData();
- Assert.assertEquals("task1",eventData.get(LanguageEventData.levelName.TASK_NAME.name()));
+ LanguageEventData eventData = (LanguageEventData)inputEvent.getData();
+ Assert.assertEquals("task1",eventData.getStepName());
+ Assert.assertEquals(LanguageEventData.StepType.TASK,eventData.getStepType());
Map<String,Serializable> boundInputs = (Map<String,Serializable>)eventData.get(LanguageEventData.BOUND_INPUTS);
Assert.assertEquals(5,boundInputs.get("input1"));
@@ -172,14 +173,16 @@ public class TaskStepsTest {
ScoreEvent outputStart = eventsIter.next();
Assert.assertEquals(ScoreLangConstants.EVENT_OUTPUT_START,outputStart.getEventType());
- Map<String,Serializable> eventData = (Map<String,Serializable>)outputStart.getData();
- Assert.assertEquals("task1",eventData.get(LanguageEventData.levelName.TASK_NAME.name()));
+ LanguageEventData eventData = (LanguageEventData)outputStart.getData();
+ Assert.assertEquals("task1",eventData.getStepName());
+ Assert.assertEquals(LanguageEventData.StepType.TASK,eventData.getStepType());
ScoreEvent outputEnd = eventsIter.next();
Assert.assertEquals(ScoreLangConstants.EVENT_OUTPUT_END,outputEnd.getEventType());
- eventData = (Map<String,Serializable>)outputEnd.getData();
- Assert.assertEquals("task1",eventData.get(LanguageEventData.levelName.TASK_NAME.name()));
+ eventData = (LanguageEventData)outputEnd.getData();
+ Assert.assertEquals("task1",eventData.getStepName());
+ Assert.assertEquals(LanguageEventData.StepType.TASK,eventData.getStepType());
}
diff --git a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/AbstractAggregatorListener.java b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/AbstractAggregatorListener.java
index 6c738ff7f..05f1be1e4 100644
--- a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/AbstractAggregatorListener.java
+++ b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/AbstractAggregatorListener.java
@@ -23,9 +23,6 @@ import java.util.List;
*/
public abstract class AbstractAggregatorListener implements ScoreEventListener {
- public static final String TASK_NAME = LanguageEventData.levelName.TASK_NAME.name();
- public static final String EXECUTABLE_NAME = LanguageEventData.levelName.EXECUTABLE_NAME.name();
-
private final List<LanguageEventData> events = new ArrayList<>();
public List<LanguageEventData> getEvents() {
diff --git a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/BranchAggregatorListener.java b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/BranchAggregatorListener.java
index 69dc61ca9..884f81c51 100644
--- a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/BranchAggregatorListener.java
+++ b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/BranchAggregatorListener.java
@@ -11,7 +11,6 @@ package io.cloudslang.lang.systemtests;
import ch.lambdaj.group.Group;
import com.google.common.collect.Lists;
-import io.cloudslang.lang.entities.ScoreLangConstants;
import io.cloudslang.lang.runtime.RuntimeConstants;
import io.cloudslang.lang.runtime.env.ReturnValues;
import io.cloudslang.lang.runtime.events.LanguageEventData;
@@ -49,8 +48,7 @@ public class BranchAggregatorListener extends AbstractAggregatorListener {
for (LanguageEventData branchData : data) {
String path = branchData.getPath();
- String stepName = branchData.get(TASK_NAME) != null ? (String) branchData.get(TASK_NAME)
- : (String) branchData.get(EXECUTABLE_NAME);
+ String stepName = branchData.getStepName();
ReturnValues returnValues = (ReturnValues) branchData.get(RuntimeConstants.BRANCH_RETURN_VALUES_KEY);
branches.add(
new StepData(
@@ -58,7 +56,7 @@ public class BranchAggregatorListener extends AbstractAggregatorListener {
stepName,
new HashMap<String, Serializable>(),
returnValues.getOutputs(),
- returnValues.getResult()
+ null, returnValues.getResult()
)
);
}
diff --git a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/JoinAggregatorListener.java b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/JoinAggregatorListener.java
index 2c883b0f4..9e7dc5695 100644
--- a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/JoinAggregatorListener.java
+++ b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/JoinAggregatorListener.java
@@ -41,8 +41,7 @@ public class JoinAggregatorListener extends AbstractAggregatorListener {
private StepData buildPublishAggregateData(LanguageEventData data) {
String path = data.getPath();
- String stepName = data.get(TASK_NAME) != null ? (String) data.get(TASK_NAME)
- : (String) data.get(EXECUTABLE_NAME);
+ String stepName = data.getStepName();
Map<String, Serializable> outputs = data.getOutputs();
String result = (String) data.get(LanguageEventData.RESULT);
return new StepData(
@@ -50,7 +49,7 @@ public class JoinAggregatorListener extends AbstractAggregatorListener {
stepName,
new HashMap<String, Serializable>(),
outputs,
- result
+ null, result
);
}
diff --git a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/RunDataAggregatorListener.java b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/RunDataAggregatorListener.java
index 4c02de5cc..b79972861 100644
--- a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/RunDataAggregatorListener.java
+++ b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/RunDataAggregatorListener.java
@@ -10,10 +10,13 @@
package io.cloudslang.lang.systemtests;
+import ch.lambdaj.Lambda;
import ch.lambdaj.function.convert.Converter;
import ch.lambdaj.group.Group;
import io.cloudslang.lang.entities.ScoreLangConstants;
import io.cloudslang.lang.runtime.events.LanguageEventData;
+import org.apache.commons.collections4.CollectionUtils;
+import org.hamcrest.Matchers;
import java.io.Serializable;
import java.util.HashMap;
@@ -21,6 +24,7 @@ import java.util.List;
import java.util.Map;
import static ch.lambdaj.Lambda.*;
+import static org.hamcrest.Matchers.equalTo;
/*
* Created by orius123 on 24/12/14.
@@ -42,25 +46,37 @@ public class RunDataAggregatorListener extends AbstractAggregatorListener {
}
private StepData buildStepData(List<LanguageEventData> data) {
- Map<String, LanguageEventData> stepEvents = map(data, new Converter<LanguageEventData, String>() {
- @Override
- public String convert(LanguageEventData from) {
- return from.getEventType();
- }
- });
+ List<LanguageEventData> taskEvents = selectByStepType(data, LanguageEventData.StepType.TASK);
+ List<LanguageEventData> executableEvents = selectByStepType(data, LanguageEventData.StepType.EXECUTABLE);
+
+ LanguageEventData inputsEvent;
+ LanguageEventData outputsEvent;
+
+ if (CollectionUtils.isNotEmpty(taskEvents)) {
+ inputsEvent = selectByEventType(taskEvents, ScoreLangConstants.EVENT_INPUT_END);
+ outputsEvent = selectByEventType(taskEvents, ScoreLangConstants.EVENT_OUTPUT_END);
+ } else {
+ inputsEvent = selectByEventType(executableEvents, ScoreLangConstants.EVENT_INPUT_END);
+ outputsEvent = selectByEventType(executableEvents, ScoreLangConstants.EVENT_OUTPUT_END);
+ }
+ String path = inputsEvent.getPath();
+ String stepName = inputsEvent.getStepName();
+ Map<String, Serializable> inputs = inputsEvent.getInputs();
+
+ Map<String, Serializable> outputs = outputsEvent == null ? null : outputsEvent.getOutputs();
+ String result = outputsEvent == null ? null : (String) outputsEvent.get(LanguageEventData.RESULT);
- LanguageEventData inputsEvent = stepEvents.get(ScoreLangConstants.EVENT_INPUT_END);
- LanguageEventData outputsEvent = stepEvents.get(ScoreLangConstants.EVENT_OUTPUT_END);
+ String executableName = executableEvents.get(0).getStepName();
- String path = inputsEvent.getPath();
- String stepName = inputsEvent.get(TASK_NAME) != null ? (String) inputsEvent.get(TASK_NAME)
- : (String) inputsEvent.get(EXECUTABLE_NAME);
- Map<String, Serializable> inputs = inputsEvent.getInputs();
+ return new StepData(path, stepName, inputs, outputs, executableName, result);
+ }
- Map<String, Serializable> outputs = outputsEvent == null ? null : outputsEvent.getOutputs();
- String result = outputsEvent == null ? null : (String) outputsEvent.get(LanguageEventData.RESULT);
+ private List<LanguageEventData> selectByStepType(List<LanguageEventData> data, LanguageEventData.StepType stepType) {
+ return select(data, having(on(LanguageEventData.class).getStepType(), equalTo(stepType)));
+ }
- return new StepData(path, stepName, inputs, outputs, result);
+ private LanguageEventData selectByEventType(List<LanguageEventData> data, String eventType) {
+ return selectFirst(data, having(on(LanguageEventData.class).getEventType(), equalTo(eventType)));
}
}
diff --git a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/StepData.java b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/StepData.java
index 8036e3979..f168a5d7b 100644
--- a/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/StepData.java
+++ b/cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/StepData.java
@@ -19,13 +19,16 @@ public class StepData {
private final String name;
private final Map<String, Serializable> inputs;
private final Map<String, Serializable> outputs;
+ private final String executableName;
private final String result;
- public StepData(String path, String name, Map<String, Serializable> inputs, Map<String, Serializable> outputs, String result) {
+ public StepData(String path, String name, Map<String, Serializable> inputs,
+ Map<String, Serializable> outputs, String executableName, String result) {
this.path = path;
this.name = name;
this.inputs = inputs;
this.outputs = outputs;
+ this.executableName = executableName;
this.result = result;
}
@@ -45,6 +48,10 @@ public class StepData {
return outputs;
}
+ public String getExecutableName() {
+ return executableName;
+ }
+
public String getResult() {
return result;
}
diff --git a/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/LoopFlowsTest.java b/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/LoopFlowsTest.java
index e6e762d7d..9b1cde463 100644
--- a/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/LoopFlowsTest.java
+++ b/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/LoopFlowsTest.java
@@ -23,8 +23,10 @@ import java.util.Map;
import java.util.Set;
import static ch.lambdaj.Lambda.filter;
+import static ch.lambdaj.Lambda.select;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.startsWith;
public class LoopFlowsTest extends SystemsTestsParent{
@@ -196,7 +198,7 @@ public class LoopFlowsTest extends SystemsTestsParent{
}
private List<String> getTasksOnly(Map<String, StepData> stepsData) {
- return filter(not(endsWith("0")), stepsData.keySet());
+ return select(stepsData.keySet(), startsWith("0."));
}
private void verifyPersonMap(Map<String, StepData> stepsData) {
diff --git a/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/SystemsTestsParent.java b/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/SystemsTestsParent.java
index 2f8edcbaf..f632b7583 100644
--- a/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/SystemsTestsParent.java
+++ b/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/SystemsTestsParent.java
@@ -29,10 +29,10 @@ import java.util.Map;
public abstract class SystemsTestsParent {
protected static final String EXEC_START_PATH = "0";
- protected static final String FIRST_STEP_PATH = "0/1";
- protected static final String SECOND_STEP_KEY = "0/2";
- protected static final String THIRD_STEP_KEY = "0/3";
- protected static final String FOURTH_STEP_KEY = "0/4";
+ protected static final String FIRST_STEP_PATH = "0.0";
+ protected static final String SECOND_STEP_KEY = "0.1";
+ protected static final String THIRD_STEP_KEY = "0.2";
+ protected static final String FOURTH_STEP_KEY = "0.3";
@Autowired
protected Slang slang;
diff --git a/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/flows/NavigationTest.java b/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/flows/NavigationTest.java
index 2f757650a..c7013689c 100644
--- a/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/flows/NavigationTest.java
+++ b/cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/flows/NavigationTest.java
@@ -53,7 +53,6 @@ public class NavigationTest extends SystemsTestsParent {
Map<String, StepData> tasks = triggerWithData(compilationArtifact, userInputs, null).getTasks();
- Assert.assertEquals(5, tasks.size());
Assert.assertEquals("check_number", tasks.get(FIRST_STEP_PATH).getName());
Assert.assertEquals("process_even_number", tasks.get(SECOND_STEP_KEY).getName());
} | ['cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/SystemsTestsParent.java', 'cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/StepData.java', 'cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/JoinAggregatorListener.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ActionSteps.java', 'cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/events/LanguageEventDataTest.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AbstractSteps.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/ExecutableSteps.java', 'cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/BranchAggregatorListener.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/navigations/Navigations.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/env/ExecutionPath.java', 'cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/AbstractAggregatorListener.java', 'cloudslang-compiler/src/main/java/io/cloudslang/lang/compiler/scorecompiler/ExecutionPlanBuilder.java', 'cloudslang-cli/src/main/java/io/cloudslang/lang/cli/services/SyncTriggerEventListener.java', 'cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/LoopFlowsTest.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/events/LanguageEventData.java', 'cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/TaskStepsTest.java', 'cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/ExecutableStepsTest.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/TaskSteps.java', 'cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java', 'cloudslang-tests/src/main/java/io/cloudslang/lang/systemtests/RunDataAggregatorListener.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java', 
'cloudslang-tests/src/test/java/io/cloudslang/lang/systemtests/flows/NavigationTest.java'] | {'.java': 22} | 22 | 22 | 0 | 0 | 22 | 322,477 | 62,534 | 8,404 | 104 | 23,957 | 4,340 | 471 | 10 | 162 | 30 | 33 | 4 | 0 | 0 | 1970-01-01T00:23:53 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,031 | cloudslang/cloud-slang/347/330 | cloudslang | cloud-slang | https://github.com/CloudSlang/cloud-slang/issues/330 | https://github.com/CloudSlang/cloud-slang/pull/347 | https://github.com/CloudSlang/cloud-slang/pull/347 | 2 | fix | Join branches throws NPE when aggregating wrong output | when running flow:
https://gist.github.com/orius123/a5c2e9facf20e6d0de00
NPE is thrown from AsyncLoopSteps
my best guess is that something in this chain in null
[Relevant line](https://github.com/CloudSlang/cloud-slang/blob/7c2ff9c81ad706e065bf394f890b31c55ad6c038/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java#L224)
10x @genadi-hp for catching this issue
| 99c4ffa938433aadd8411a5f2c0b8d43141f3108 | 1c5448fc3155f51c61ebe7fe788e4de6ec2b9251 | https://github.com/cloudslang/cloud-slang/compare/99c4ffa938433aadd8411a5f2c0b8d43141f3108...1c5448fc3155f51c61ebe7fe788e4de6ec2b9251 | diff --git a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java
index 46d486239..f7180b930 100644
--- a/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java
+++ b/cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java
@@ -24,7 +24,10 @@ import io.cloudslang.lang.runtime.events.LanguageEventData;
import io.cloudslang.score.api.EndBranchDataContainer;
import io.cloudslang.score.api.execution.ExecutionParametersConsts;
import io.cloudslang.score.lang.ExecutionRuntimeServices;
+import io.cloudslang.score.lang.SystemContext;
+import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang.SerializationUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.log4j.Logger;
import org.python.google.common.collect.Lists;
@@ -46,6 +49,8 @@ import static io.cloudslang.score.api.execution.ExecutionParametersConsts.EXECUT
@Component
public class AsyncLoopSteps extends AbstractSteps {
+ public static final String BRANCH_EXCEPTION_PREFIX = "Error running branch";
+
@Autowired
private AsyncLoopBinding asyncLoopBinding;
@@ -128,7 +133,7 @@ public class AsyncLoopSteps extends AbstractSteps {
Map<String, Serializable> contextBeforeSplit = flowContext.getImmutableViewOfVariables();
List<String> branchesResult = Lists.newArrayList();
- collectBranchesData(runEnv, executionRuntimeServices, nodeName, branchesContext, branchesResult);
+ collectBranchesData(executionRuntimeServices, nodeName, branchesContext, branchesResult);
Map<String, Serializable> publishValues =
bindAggregateOutputs(
@@ -150,7 +155,7 @@ public class AsyncLoopSteps extends AbstractSteps {
runEnv.getExecutionPath().forward();
} catch (RuntimeException e) {
logger.error("There was an error running the end task execution step of: \\'" + nodeName + "\\'. Error is: " + e.getMessage());
- throw new RuntimeException("Error running: \\'" + nodeName + "\\': " + e.getMessage(), e);
+ throw new RuntimeException("Error running: \\'" + nodeName + "\\': \\n" + e.getMessage(), e);
}
}
@@ -219,7 +224,6 @@ public class AsyncLoopSteps extends AbstractSteps {
}
private void collectBranchesData(
- RunEnvironment runEnv,
ExecutionRuntimeServices executionRuntimeServices,
String nodeName,
List<Map<String, Serializable>> branchesContext,
@@ -227,6 +231,20 @@ public class AsyncLoopSteps extends AbstractSteps {
List<EndBranchDataContainer> branches = executionRuntimeServices.getFinishedChildBranchesData();
for (EndBranchDataContainer branch : branches) {
+
+ //first we check that no exception was thrown during the execution of the branch
+ String branchException = branch.getException();
+ if (StringUtils.isNotEmpty(branchException)) {
+ Map<String, Serializable> systemContextMap = branch.getSystemContext();
+ String branchID = null;
+ if (MapUtils.isNotEmpty(systemContextMap)) {
+ ExecutionRuntimeServices branchExecutionRuntimeServices = new SystemContext(systemContextMap);
+ branchID = branchExecutionRuntimeServices.getBranchId();
+ }
+ logger.error("There was an error running branch: " + branchID + " Error is: " + branchException);
+ throw new RuntimeException(BRANCH_EXCEPTION_PREFIX + ": \\n" + branchException);
+ }
+
Map<String, Serializable> branchContext = branch.getContexts();
RunEnvironment branchRuntimeEnvironment = (RunEnvironment) branchContext.get(ScoreLangConstants.RUN_ENV);
diff --git a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java
index 1048b164c..38aba6912 100644
--- a/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java
+++ b/cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java
@@ -37,6 +37,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import javax.script.ScriptEngine;
import java.io.Serializable;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -53,6 +54,8 @@ import static org.mockito.Mockito.*;
@ContextConfiguration(classes = AsyncLoopStepsTest.Config.class)
public class AsyncLoopStepsTest {
+ public static final String BRANCH_EXCEPTION_MESSAGE = "Exception details placeholder";
+
@Autowired
private AsyncLoopSteps asyncLoopSteps;
@@ -390,6 +393,29 @@ public class AsyncLoopStepsTest {
Assert.assertEquals(expectedEventTypesInOrder, actualEventTypesInOrder);
}
+ @Test
+ public void testExceptionIsCapturedFromBranches() throws Exception {
+ // prepare arguments
+ RunEnvironment runEnvironment = new RunEnvironment();
+ runEnvironment.getExecutionPath().down();
+ Map<String, Serializable> variables = new HashMap<>();
+ Context context = new Context(variables);
+ runEnvironment.getStack().pushContext(context);
+
+ ExecutionRuntimeServices executionRuntimeServices = createExecutionRuntimeServicesMockWithBranchException();
+
+ exception.expectMessage(BRANCH_EXCEPTION_MESSAGE);
+ exception.expect(RuntimeException.class);
+
+ asyncLoopSteps.joinBranches(
+ runEnvironment,
+ executionRuntimeServices,
+ new ArrayList<Output>(0),
+ new HashMap<String, ResultNavigation>(),
+ "nodeName"
+ );
+ }
+
private ExecutionRuntimeServices createAndConfigureExecutionRuntimeServicesMock(
Map<String, Serializable> runtimeContext1,
Map<String, Serializable> runtimeContext2,
@@ -445,6 +471,18 @@ public class AsyncLoopStepsTest {
return executionRuntimeServices;
}
+ private ExecutionRuntimeServices createExecutionRuntimeServicesMockWithBranchException() {
+ ExecutionRuntimeServices executionRuntimeServices = mock(ExecutionRuntimeServices.class);
+ List<EndBranchDataContainer> branchesContainers = Lists.newArrayList(
+ new EndBranchDataContainer(
+ new HashMap<String, Serializable>(),
+ new HashMap<String, Serializable>(),
+ BRANCH_EXCEPTION_MESSAGE)
+ );
+ when(executionRuntimeServices.getFinishedChildBranchesData()).thenReturn(branchesContainers);
+ return executionRuntimeServices;
+ }
+
@Configuration
static class Config {
| ['cloudslang-runtime/src/test/java/io/cloudslang/lang/runtime/steps/AsyncLoopStepsTest.java', 'cloudslang-runtime/src/main/java/io/cloudslang/lang/runtime/steps/AsyncLoopSteps.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 333,169 | 64,648 | 8,642 | 104 | 1,534 | 276 | 24 | 1 | 398 | 28 | 126 | 9 | 2 | 0 | 1970-01-01T00:23:55 | 226 | Java | {'Java': 2503341, 'Slash': 727911, 'Python': 9681, 'Shell': 1856, 'CSS': 1039, 'JavaScript': 795} | Apache License 2.0 |
1,479 | hashgraph/hedera-services/796/793 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/793 | https://github.com/hashgraph/hedera-services/pull/796 | https://github.com/hashgraph/hedera-services/pull/796 | 1 | closes | Fix UpdateNode test failure | **Summary of the defect**
The Update Node test that has passed in the branch previously failed with this error on master.
```
Node 0 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:40:50.999999425Z! (java.lang.NullPointerException)
Node 0 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:47:25.999999193Z! (java.lang.NullPointerException)
Node 1 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:40:50.999999425Z! (java.lang.NullPointerException)
Node 1 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:47:25.999999193Z! (java.lang.NullPointerException)
Node 2 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:40:50.999999425Z! (java.lang.NullPointerException)
Node 2 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:47:25.999999193Z! (java.lang.NullPointerException)
Node 3 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:40:50.999999425Z! (java.lang.NullPointerException)
Node 3 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:47:25.999999193Z! (java.lang.NullPointerException)
Node 4 has exception:- Only default usage prices available for function Freeze @ 2020-11-20T04:47:25.999999193Z! (java.lang.NullPointerException)
```
**How to reproduce (if possible)**
**Service logs (if applicable)**
https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1605848071329100
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| 6622d3154982475ce83e1a82adc4bd1c1e8db1af | 3661caee928fa2c58855a3f07ce34d3ced107e2e | https://github.com/hashgraph/hedera-services/compare/6622d3154982475ce83e1a82adc4bd1c1e8db1af...3661caee928fa2c58855a3f07ce34d3ced107e2e | diff --git a/hedera-node/src/main/java/com/hedera/services/fees/calculation/AwareFcfsUsagePrices.java b/hedera-node/src/main/java/com/hedera/services/fees/calculation/AwareFcfsUsagePrices.java
index 315c15bd5c..a70b378c4a 100644
--- a/hedera-node/src/main/java/com/hedera/services/fees/calculation/AwareFcfsUsagePrices.java
+++ b/hedera-node/src/main/java/com/hedera/services/fees/calculation/AwareFcfsUsagePrices.java
@@ -9,9 +9,9 @@ package com.hedera.services.fees.calculation;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -118,10 +118,10 @@ public class AwareFcfsUsagePrices implements UsagePricesProvider {
return usagePrices;
} catch (Exception e) {
log.warn(
- "Only default usage prices available for function {} @ {}! ({})",
+ "Default usage price will be used, no specific usage prices available for function {} @ {}!",
function,
- Instant.ofEpochSecond(at.getSeconds(), at.getNanos()),
- e);
+ Instant.ofEpochSecond(at.getSeconds(), at.getNanos())
+ );
}
return DEFAULT_USAGE_PRICES;
}
diff --git a/hedera-node/src/test/java/com/hedera/services/fees/calculation/AwareFcfsUsagePricesTest.java b/hedera-node/src/test/java/com/hedera/services/fees/calculation/AwareFcfsUsagePricesTest.java
index 7f3054d461..e3b06f824a 100644
--- a/hedera-node/src/test/java/com/hedera/services/fees/calculation/AwareFcfsUsagePricesTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/fees/calculation/AwareFcfsUsagePricesTest.java
@@ -9,9 +9,9 @@ package com.hedera.services.fees.calculation;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -206,7 +206,7 @@ class AwareFcfsUsagePricesTest {
// then:
assertEquals(DEFAULT_USAGE_PRICES, actual);
assertEquals(1, mockAppender.size());
- assertEquals("WARN - Only default usage prices available for function UNRECOGNIZED @ 1970-01-15T06:56:06Z! (java.lang.NullPointerException)",
+ assertEquals("WARN - Default usage price will be used, no specific usage prices available for function UNRECOGNIZED @ 1970-01-15T06:56:06Z!",
mockAppender.get(0));
// tearDown: | ['hedera-node/src/main/java/com/hedera/services/fees/calculation/AwareFcfsUsagePrices.java', 'hedera-node/src/test/java/com/hedera/services/fees/calculation/AwareFcfsUsagePricesTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 7,095,280 | 1,656,830 | 190,259 | 1,019 | 329 | 80 | 10 | 1 | 1,718 | 184 | 453 | 24 | 1 | 1 | 1970-01-01T00:26:45 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,478 | hashgraph/hedera-services/799/687 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/687 | https://github.com/hashgraph/hedera-services/pull/799 | https://github.com/hashgraph/hedera-services/pull/799 | 1 | closes | SuiteRunner's PayerID creation when failed should fall back to Default ID | <!-- Thanks for submitting a bug report! Before submitting:
1. Try searching the existing issues to see if your issue has already been reported
2. If you're reporting a security vulnerability, please email security@hedera.com instead of opening an issue
-->
**Summary of the defect**
When the process of creating a unique payer ID for a SuiteRunner fails, the payer ID is set to an empty string which is invalid and thus creating invalid transactions in the following steps.
**How to reproduce (if possible)**
Run any suiteRunner Spec which can break initial account creation, like sending a **cryptocreate** txn to a **publictestnet** state with wrong keys.
| e48ff72385325f9ebf2d208dbee2236ba4059d9f | a0a1818d3f57247f21f743e95b4fd8b3810afb42 | https://github.com/hashgraph/hedera-services/compare/e48ff72385325f9ebf2d208dbee2236ba4059d9f...a0a1818d3f57247f21f743e95b4fd8b3810afb42 | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java
index b896ebf365..29c8e0ac9c 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java
@@ -129,6 +129,7 @@ import java.util.stream.Stream;
import static com.hedera.services.bdd.spec.HapiSpecSetup.NodeSelection.FIXED;
import static com.hedera.services.bdd.spec.HapiSpecSetup.TlsConfig.OFF;
+import static com.hedera.services.bdd.spec.transactions.TxnUtils.isIdLiteral;
import static com.hedera.services.bdd.suites.HapiApiSuite.FinalOutcome;
import static java.util.concurrent.CompletableFuture.runAsync;
import static java.util.stream.Collectors.groupingBy;
@@ -146,7 +147,7 @@ public class SuiteRunner {
private static final int EXPECTED_DEV_NETWORK_SIZE = 3;
private static final int EXPECTED_CI_NETWORK_SIZE = 4;
- private static final String DEFAULT_PAYER_ID = "2";
+ private static final String DEFAULT_PAYER_ID = "0.0.2";
public static int expectedNetworkSize = EXPECTED_DEV_NETWORK_SIZE;
@@ -374,6 +375,9 @@ public class SuiteRunner {
Thread.sleep(r.nextInt(5000));
new CryptoCreateForSuiteRunner(nodes, defaultNode).runSuiteAsync();
Thread.sleep(2000);
+ if(!isIdLiteral(payerId)){
+ payerId = DEFAULT_PAYER_ID;
+ }
} catch (InterruptedException e) {
e.printStackTrace();
} | ['test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 7,097,055 | 1,657,269 | 190,302 | 1,019 | 260 | 69 | 6 | 1 | 674 | 105 | 141 | 11 | 0 | 0 | 1970-01-01T00:26:45 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,477 | hashgraph/hedera-services/807/806 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/806 | https://github.com/hashgraph/hedera-services/pull/807 | https://github.com/hashgraph/hedera-services/pull/807 | 1 | closes | Ensure SystemFileManager.loadAllSystemFiles is called in ServicesMain.init before any properties are accessed | **Summary of the defect**
Currently `ServicesMain.init` starts the record stream thread _before_ calling `SystemFilesManager.loadAllSystemFiles` if necessary (as on a restart). Hence when this call _is_ necessary, the record stream directory will always be the default. | e48ff72385325f9ebf2d208dbee2236ba4059d9f | 14cf74b707dc128bad5fd9e700feb23b0c75fdb9 | https://github.com/hashgraph/hedera-services/compare/e48ff72385325f9ebf2d208dbee2236ba4059d9f...14cf74b707dc128bad5fd9e700feb23b0c75fdb9 | diff --git a/hedera-node/src/main/java/com/hedera/services/ServicesMain.java b/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
index 61a767710f..8264b007bd 100644
--- a/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
+++ b/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
@@ -142,28 +142,28 @@ public class ServicesMain implements SwirldMain {
}
private void contextDrivenInit() {
- registerIssListener();
- log.info("Platform callbacks registered.");
checkPropertySources();
log.info("Property sources are available.");
- configurePlatform();
- log.info("Platform is configured.");
+ initSystemFiles();
+ log.info("System files rationalized.");
+ createSystemAccountsIfNeeded();
+ log.info("System accounts initialized.");
migrateStateIfNeeded();
log.info("Migrations complete.");
- startRecordStreamThread();
- log.info("Record stream started.");
- startNettyIfAppropriate();
- log.info("Netty started.");
- createSystemAccountsIfNeeded();
- log.info("System accounts rationalized.");
validateLedgerState();
log.info("Ledger state ok.");
- createSystemFilesIfNeeded();
- log.info("System files rationalized.");
+ configurePlatform();
+ log.info("Platform is configured.");
+ registerIssListener();
+ log.info("Platform callbacks registered.");
exportAccountsIfDesired();
log.info("Accounts exported.");
initializeStats();
log.info("Stats initialized.");
+ startRecordStreamThread();
+ log.info("Record stream started in directory {}.", ctx.recordStream().getRecordStreamsDirectory());
+ startNettyIfAppropriate();
+ log.info("Netty started.");
log.info("Completed initialization of {} #{}", ctx.nodeType(), ctx.id());
}
@@ -181,7 +181,7 @@ public class ServicesMain implements SwirldMain {
}
}
- private void createSystemFilesIfNeeded() {
+ private void initSystemFiles() {
try {
ctx.systemFilesManager().createAddressBookIfMissing();
ctx.systemFilesManager().createNodeDetailsIfMissing();
@@ -238,10 +238,6 @@ public class ServicesMain implements SwirldMain {
return myNodeAccount.equals(blessedNodeAccount);
}
- private void loadFeeSchedule() {
- ctx.fees().init();
- }
-
private void initializeStats() {
ctx.statsManager().initializeFor(ctx.platform());
}
diff --git a/hedera-node/src/main/java/com/hedera/services/legacy/stream/RecordStream.java b/hedera-node/src/main/java/com/hedera/services/legacy/stream/RecordStream.java
index 507d04bea9..b578231b0f 100644
--- a/hedera-node/src/main/java/com/hedera/services/legacy/stream/RecordStream.java
+++ b/hedera-node/src/main/java/com/hedera/services/legacy/stream/RecordStream.java
@@ -85,7 +85,7 @@ public class RecordStream implements Runnable {
Platform platform;
MiscRunningAvgs runningAvgs;
boolean inFreeze;
- String recordStreamsDirectory;
+ final String recordStreamsDirectory;
private final PropertySource properties;
@@ -395,6 +395,10 @@ public class RecordStream implements Runnable {
return recordBuffer.size();
}
+ public String getRecordStreamsDirectory() {
+ return recordStreamsDirectory;
+ }
+
/**
* Read the FileHash from the record stream signature file
*
diff --git a/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java b/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
index f4d7638cec..1467f14fae 100644
--- a/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
@@ -243,6 +243,7 @@ public class ServicesMainTest {
public void initializesSanelyGivenPreconditions() {
// given:
InOrder inOrder = inOrder(
+ systemFilesManager,
propertySources,
platform,
stateMigrations,
@@ -257,14 +258,15 @@ public class ServicesMainTest {
subject.init(null, new NodeId(false, NODE_ID));
// then:
- inOrder.verify(platform).addSignedStateListener(any(IssListener.class));
inOrder.verify(propertySources).assertSourcesArePresent();
- inOrder.verify(platform).setSleepAfterSync(0L);
+ inOrder.verify(systemFilesManager).loadAllSystemFiles();
inOrder.verify(stateMigrations).runAllFor(ctx);
- inOrder.verify(recordStreamThread).start();
inOrder.verify(ledgerValidator).assertIdsAreValid(accounts);
inOrder.verify(ledgerValidator).hasExpectedTotalBalance(accounts);
+ inOrder.verify(platform).setSleepAfterSync(0L);
+ inOrder.verify(platform).addSignedStateListener(any(IssListener.class));
inOrder.verify(statsManager).initializeFor(platform);
+ inOrder.verify(recordStreamThread).start();
}
@Test | ['hedera-node/src/main/java/com/hedera/services/ServicesMain.java', 'hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java', 'hedera-node/src/main/java/com/hedera/services/legacy/stream/RecordStream.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 7,097,055 | 1,657,269 | 190,302 | 1,019 | 1,198 | 240 | 36 | 2 | 270 | 35 | 57 | 2 | 0 | 0 | 1970-01-01T00:26:46 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,475 | hashgraph/hedera-services/1019/1008 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1008 | https://github.com/hashgraph/hedera-services/pull/1019 | https://github.com/hashgraph/hedera-services/pull/1019 | 1 | closes | HCS-Restart-Performance-Testnet-10k-46m fails on master | **Summary of the defect**
HCS-Restart-Performance-Testnet-10k-46m fails on master with 0 TPS . Investigate the reason for it and fix the issue.
**How to reproduce (if possible)**
https://hedera-hashgraph.slack.com/archives/C018Y4E6ADT/p1611476677006700
https://hedera-hashgraph.slack.com/archives/C018Y4E6ADT/p1611562896008600
**Service logs (if applicable)**
```
...
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| d3ad20925b92b2fc9bc1bbafbe9fe99cac523ee3 | 02fd7183f517d2df68e9ae1d8fada50b8375de3c | https://github.com/hashgraph/hedera-services/compare/d3ad20925b92b2fc9bc1bbafbe9fe99cac523ee3...02fd7183f517d2df68e9ae1d8fada50b8375de3c | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/SubmitMessageLoadTest.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/SubmitMessageLoadTest.java
index 5eb275029c..186524480c 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/SubmitMessageLoadTest.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/SubmitMessageLoadTest.java
@@ -119,7 +119,6 @@ public class SubmitMessageLoadTest extends LoadTest {
private static HapiApiSpec runSubmitMessages() {
PerfTestLoadSettings settings = new PerfTestLoadSettings();
final AtomicInteger submittedSoFar = new AtomicInteger(0);
-
Supplier<HapiSpecOperation[]> submitBurst = () -> new HapiSpecOperation[] {
opSupplier(settings).get()
};
@@ -163,10 +162,23 @@ public class SubmitMessageLoadTest extends LoadTest {
: settings.getIntProperty("messageSize", messageSize)
- r.nextInt(settings.getHcsSubmitMessageSizeVar());
- // maybe use some more realistic distributions to simulate real world scenarios
- String senderId = String.format("0.0.%d", TEST_ACCOUNT_STARTS_FROM + r.nextInt(settings.getTotalAccounts()));
- String topicId = String.format("0.0.%d", TEST_ACCOUNT_STARTS_FROM + settings.getTotalAccounts()
- + r.nextInt(settings.getTotalTopics() ));
+ String senderId = "sender";
+ String topicId = "topic";
+ String senderKey= "sender";
+ String submitKey = "submitKey";
+ if(settings.getTotalAccounts() > 1) {
+ int s = r.nextInt(settings.getTotalAccounts());
+ int re = 0;
+ do {
+ re = r.nextInt(settings.getTotalAccounts());
+ } while (re == s);
+ // maybe use some more realistic distributions to simulate real world scenarios
+ senderId = String.format("0.0.%d", TEST_ACCOUNT_STARTS_FROM + r.nextInt(settings.getTotalAccounts()));
+ topicId = String.format("0.0.%d", TEST_ACCOUNT_STARTS_FROM + settings.getTotalAccounts()
+ + r.nextInt(settings.getTotalTopics() ));
+ senderKey = GENESIS;
+ submitKey = GENESIS;
+ }
if(log.isDebugEnabled()) {
log.debug("{} will submit a message of size {} to topic {}", senderId, msgSize, topicId);
@@ -176,7 +188,7 @@ public class SubmitMessageLoadTest extends LoadTest {
randomUtf8Bytes( msgSize - 8)))
.noLogging()
.payingWith(senderId)
- .signedBy(GENESIS, GENESIS)
+ .signedBy(senderKey, submitKey)
.fee(100_000_000)
.suppressStats(true)
.hasRetryPrecheckFrom(BUSY, DUPLICATE_TRANSACTION, PLATFORM_TRANSACTION_NOT_CREATED, | ['test-clients/src/main/java/com/hedera/services/bdd/suites/perf/SubmitMessageLoadTest.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 5,784,728 | 1,385,553 | 163,086 | 1,026 | 1,121 | 279 | 24 | 1 | 642 | 73 | 206 | 20 | 2 | 1 | 1970-01-01T00:26:51 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,476 | hashgraph/hedera-services/829/814 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/814 | https://github.com/hashgraph/hedera-services/pull/829 | https://github.com/hashgraph/hedera-services/pull/829 | 1 | closes | AWS daily APIPermissionUpdate reconnect fails | **Summary of the defect**
The test failed due to PLATFORM_TRANSACTION_NOT_CREATED.
**How to reproduce (if possible)**
Please refer to the slack URL: https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1606723088018100
and the detailed log file: https://s3.console.aws.amazon.com/s3/object/hedera-service-regression-jrs?region=us-east-2&prefix=4N_1C/Reconnect/20201130-0717-AWS-Daily-Services-Comp-Reconnect-4N-1C/APIPermissionUpdate-Reconnect-1-12m.json/node0000-TestClient/hapi-client-combined.log
**Service logs (if applicable)**
```
...
2020-11-30 07:46:20.267 INFO 128 HapiQueryOp - 'updateApiPermissionsDuringReconnect' - Paying for HapiGetFileContents{sigs=1, payer=strong-control, node=0.0.6, file=API_PERMISSIONS, fileId=fileNum: 122
} with body=transactionID { transactionValidStart { seconds: 1606722320 nanos: 380 } accountID { accountNum: 50 } } nodeAccountID { accountNum: 6 } transactionFee: 84163 transactionValidDuration { seconds: 120 } memo: "\\303\\203\\302\\256\\303\\202\\302\\267\\303\\203\\302\\271tF8\\303\\202\\302\\256J\\303\\203\\302\\213\\303\\203\\302\\220\\303\\203\\302\\216" cryptoTransfer { transfers { accountAmounts { accountID { accountNum: 50 } amount: -8 } accountAmounts { accountID { accountNum: 6 } amount: 8 } } }; sigs=sigPair { pubKeyPrefix: "\\n" ed25519: "\\372l]\\231|*\\257\\035e\\212\\241?\\200\\265Q~u\\000I\\005J\\244\\202cf\\244\\242_\\024-\\344\\367\\351\\365xs9\\273OB=\\226?#\\220\\371\\326\\214\\243\\tdW\\274\\273^\\272\\036\\225\\312\\221C\\302\\t\\b" }
2020-11-30 07:46:20.383 INFO 155 HapiGetFileContents - API_PERMISSIONS contained 0 bytes
2020-11-30 07:46:20.383 INFO 164 HapiGetFileContents - As a config list, contents are:
2020-11-30 07:46:20.385 WARN 199 HapiSpecOperation - 'updateApiPermissionsDuringReconnect' - HapiGetFileContents{sigs=1, payer=strong-control, node=0.0.6, file=API_PERMISSIONS, fileId=fileNum: 122
} failed!
java.lang.AssertionError: Bad answerOnlyPrecheck! expected:<OK> but was:<PLATFORM_TRANSACTION_NOT_CREATED>
at org.junit.Assert.fail(Assert.java:88) ~[SuiteRunner.jar:?]
at org.junit.Assert.failNotEquals(Assert.java:834) ~[SuiteRunner.jar:?]
at org.junit.Assert.assertEquals(Assert.java:118) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.queries.HapiQueryOp.submitOp(HapiQueryOp.java:144) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.HapiSpecOperation.execFor(HapiSpecOperation.java:179) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.HapiApiSpec.exec(HapiApiSpec.java:222) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.HapiApiSpec.run(HapiApiSpec.java:176) ~[SuiteRunner.jar:?]
at java.util.AbstractList$RandomAccessSpliterator.forEachRemaining(AbstractList.java:720) ~[?:?]
at java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658) ~[?:?]
at com.hedera.services.bdd.suites.HapiApiSuite.runSync(HapiApiSuite.java:249) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.HapiApiSuite.runSuite(HapiApiSuite.java:140) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.HapiApiSuite.runSuiteSync(HapiApiSuite.java:133) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.SuiteRunner.lambda$runSuitesSync$15(SuiteRunner.java:490) ~[SuiteRunner.jar:?]
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:176) ~[?:?]
at java.util.Spliterators$ArraySpliterator.forEachRemaining(Spliterators.java:948) ~[?:?]
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484) ~[?:?]
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474) ~[?:?]
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:913) ~[?:?]
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:578) ~[?:?]
at com.hedera.services.bdd.suites.SuiteRunner.runSuitesSync(SuiteRunner.java:491) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.SuiteRunner.lambda$runTargetCategories$10(SuiteRunner.java:465) ~[SuiteRunner.jar:?]
at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195) [?:?]
at java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1654) [?:?]
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484) [?:?]
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474) [?:?]
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:913) [?:?]
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) [?:?]
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:578) [?:?]
at com.hedera.services.bdd.suites.SuiteRunner.runTargetCategories(SuiteRunner.java:465) [SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.SuiteRunner.runCategories(SuiteRunner.java:434) [SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.SuiteRunner.main(SuiteRunner.java:358) [SuiteRunner.jar:?]
2020-11-30 07:46:20.390 INFO 244 HapiApiSpec - 'updateApiPermissionsDuringReconnect' - final status: FAILED!
2020-11-30 07:46:20.391 INFO 168 UpdateApiPermissionsDuringReconnect - -------------- RESULTS OF UpdateApiPermissionsDuringReconnect SUITE --------------
2020-11-30 07:46:20.391 INFO 170 UpdateApiPermissionsDuringReconnect - Spec{name=updateApiPermissionsDuringReconnect, status=FAILED}
2020-11-30 07:46:20.395 INFO 439 SuiteRunner - ============== sync run results ==============
2020-11-30 07:46:20.395 INFO 442 SuiteRunner - UpdateApiPermi...uringReconnect :: 0/1 suites ran OK
2020-11-30 07:46:20.395 INFO 448 SuiteRunner - --> Problems in suite 'UpdateApiPermissionsDuringReconnect' :: Spec{name=updateApiPermissionsDuringReconnect, status=FAILED}
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| a1becc9109e1724e5b02961e6e0821a9a51fd56e | f4b1559a3118e08e6c4918324571a47417639c64 | https://github.com/hashgraph/hedera-services/compare/a1becc9109e1724e5b02961e6e0821a9a51fd56e...f4b1559a3118e08e6c4918324571a47417639c64 | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/reconnect/UpdateApiPermissionsDuringReconnect.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/reconnect/UpdateApiPermissionsDuringReconnect.java
index a251efa59b..e6679dfc5f 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/reconnect/UpdateApiPermissionsDuringReconnect.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/reconnect/UpdateApiPermissionsDuringReconnect.java
@@ -29,7 +29,9 @@ import org.apache.logging.log4j.Logger;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
+import java.time.Duration;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sleepFor;
import static com.hedera.services.bdd.spec.HapiApiSpec.defaultHapiSpec;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents;
@@ -54,6 +56,7 @@ public class UpdateApiPermissionsDuringReconnect extends HapiApiSuite {
final String fileInfoRegistry = "apiPermissionsReconnect";
return defaultHapiSpec("updateApiPermissionsDuringReconnect")
.given(
+ sleepFor(Duration.ofSeconds(25).toMillis()),
getAccountBalance(GENESIS).setNode("0.0.6").unavailableNode()
)
.when( | ['test-clients/src/main/java/com/hedera/services/bdd/suites/reconnect/UpdateApiPermissionsDuringReconnect.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 7,097,755 | 1,657,537 | 190,316 | 1,019 | 151 | 35 | 3 | 1 | 5,987 | 361 | 1,795 | 72 | 2 | 1 | 1970-01-01T00:26:46 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,474 | hashgraph/hedera-services/1020/859 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/859 | https://github.com/hashgraph/hedera-services/pull/1020 | https://github.com/hashgraph/hedera-services/pull/1020 | 1 | closes | Account with positive balance must be created via a cryptoCreate | **Summary of the defect**
https://github.com/hashgraph/hedera-services/blob/1308f994ba95d4d76c004b319aafa33fa2fcac67/hedera-node/src/main/java/com/hedera/services/state/initialization/BackedSystemAccountsCreator.java#L81-L104
Only `GENESIS` account (also called `treasury` account) is created with `totalTinyBarFloat` during genesis. | 58049acd49d8ed56230b448455c2f71f1b5d345c | 401581d84a9213f3d0f8da9b520d6259b63cdfb7 | https://github.com/hashgraph/hedera-services/compare/58049acd49d8ed56230b448455c2f71f1b5d345c...401581d84a9213f3d0f8da9b520d6259b63cdfb7 | diff --git a/hedera-node/src/main/java/com/hedera/services/state/initialization/BackedSystemAccountsCreator.java b/hedera-node/src/main/java/com/hedera/services/state/initialization/BackedSystemAccountsCreator.java
index 6b8966e04d..3b3b1e59bf 100644
--- a/hedera-node/src/main/java/com/hedera/services/state/initialization/BackedSystemAccountsCreator.java
+++ b/hedera-node/src/main/java/com/hedera/services/state/initialization/BackedSystemAccountsCreator.java
@@ -31,7 +31,6 @@ import com.hedera.services.legacy.core.jproto.JKey;
import com.hedera.services.exceptions.NegativeAccountBalanceException;
import com.hedera.services.state.merkle.MerkleAccount;
import com.hedera.services.state.merkle.MerkleEntityId;
-import com.hedera.services.utils.MiscUtils;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.Key;
import com.hederahashgraph.api.proto.java.KeyList;
@@ -78,16 +77,9 @@ public class BackedSystemAccountsCreator implements SystemAccountsCreator {
BackingStore<AccountID, MerkleAccount> accounts,
AddressBook addressBook
) {
- var nodeAccountNums = MiscUtils.getNodeAccountNums(addressBook);
-
long N = properties.getIntProperty("ledger.numSystemAccounts");
long expiry = properties.getLongProperty("bootstrap.system.entityExpiry");
long tinyBarFloat = properties.getLongProperty("ledger.totalTinyBarFloat");
- long nodeBalance = properties.getLongProperty("bootstrap.ledger.nodeAccounts.initialBalance");
- long defaultBalance = properties.getLongProperty("bootstrap.ledger.systemAccounts.initialBalance");
- long treasuryBalance = tinyBarFloat
- - (nodeBalance * nodeAccountNums.size())
- - (defaultBalance * (N - nodeAccountNums.size() - 1));
for (long num = 1; num <= N; num++) {
var id = idWith(num);
@@ -95,11 +87,9 @@ public class BackedSystemAccountsCreator implements SystemAccountsCreator {
continue;
}
if (num == accountNums.treasury()) {
- accounts.put(id, accountWith(treasuryBalance, expiry));
- } else if (nodeAccountNums.contains(num)) {
- accounts.put(id, accountWith(nodeBalance, expiry));
+ accounts.put(id, accountWith(tinyBarFloat, expiry));
} else {
- accounts.put(id, accountWith(defaultBalance, expiry));
+ accounts.put(id, accountWith(0, expiry));
}
}
diff --git a/hedera-node/src/test/java/com/hedera/services/state/initialization/BackedSystemAccountsCreatorTest.java b/hedera-node/src/test/java/com/hedera/services/state/initialization/BackedSystemAccountsCreatorTest.java
index 3e25e5ba55..3fc2e0d1fa 100644
--- a/hedera-node/src/test/java/com/hedera/services/state/initialization/BackedSystemAccountsCreatorTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/state/initialization/BackedSystemAccountsCreatorTest.java
@@ -58,10 +58,7 @@ import static org.mockito.Mockito.verify;
class BackedSystemAccountsCreatorTest {
private long shard = 1;
private long realm = 2;
- private long nodeBalance = 10l;
- private long stdBalance = 5l;
- private long treasuryBalance = 80l;
- private long recordThresholds = 100l;
+ private long totalBalance = 100l;
private long expiry = Instant.now().getEpochSecond() + 1_234_567L;
private int numAccounts = 4;
private String b64Loc = "somewhere";
@@ -98,11 +95,7 @@ class BackedSystemAccountsCreatorTest {
given(properties.getIntProperty("ledger.numSystemAccounts"))
.willReturn(numAccounts);
given(properties.getLongProperty("ledger.totalTinyBarFloat"))
- .willReturn(100L);
- given(properties.getLongProperty("bootstrap.ledger.nodeAccounts.initialBalance"))
- .willReturn(10L);
- given(properties.getLongProperty("bootstrap.ledger.systemAccounts.initialBalance"))
- .willReturn(5L);
+ .willReturn(totalBalance);
given(properties.getStringProperty("bootstrap.genesisB64Keystore.keyName"))
.willReturn(legacyId);
given(properties.getStringProperty("bootstrap.genesisB64Keystore.path"))
@@ -122,10 +115,10 @@ class BackedSystemAccountsCreatorTest {
accountWith(2),
accountWith(3),
accountWith(4)));
- given(backingAccounts.getUnsafeRef(accountWith(1))).willReturn(withExpectedBalance(stdBalance));
- given(backingAccounts.getUnsafeRef(accountWith(2))).willReturn(withExpectedBalance(treasuryBalance));
- given(backingAccounts.getUnsafeRef(accountWith(3))).willReturn(withExpectedBalance(nodeBalance));
- given(backingAccounts.getUnsafeRef(accountWith(4))).willReturn(withExpectedBalance(stdBalance));
+ given(backingAccounts.getUnsafeRef(accountWith(1))).willReturn(withExpectedBalance(0));
+ given(backingAccounts.getUnsafeRef(accountWith(2))).willReturn(withExpectedBalance(totalBalance));
+ given(backingAccounts.getUnsafeRef(accountWith(3))).willReturn(withExpectedBalance(0));
+ given(backingAccounts.getUnsafeRef(accountWith(4))).willReturn(withExpectedBalance(0));
subject = new BackedSystemAccountsCreator(
hederaNums,
@@ -136,11 +129,11 @@ class BackedSystemAccountsCreatorTest {
@Test
public void throwsOnNegativeBalance() {
- givenMissingNode();
+ givenMissingTreasury();
// and:
given(legacyReader.hexedABytesFrom(b64Loc, legacyId)).willReturn(hexedABytes);
// and:
- given(properties.getLongProperty("bootstrap.ledger.nodeAccounts.initialBalance"))
+ given(properties.getLongProperty("ledger.totalTinyBarFloat"))
.willReturn(-100L);
// expect:
@@ -177,7 +170,7 @@ class BackedSystemAccountsCreatorTest {
subject.ensureSystemAccounts(backingAccounts, book);
// then:
- verify(backingAccounts).put(accountWith(3), withExpectedBalance(nodeBalance));
+ verify(backingAccounts).put(accountWith(3), withExpectedBalance(0));
}
@Test
@@ -190,7 +183,7 @@ class BackedSystemAccountsCreatorTest {
subject.ensureSystemAccounts(backingAccounts, book);
// then:
- verify(backingAccounts).put(accountWith(4), withExpectedBalance(stdBalance));
+ verify(backingAccounts).put(accountWith(4), withExpectedBalance(0));
}
@Test
@@ -203,7 +196,7 @@ class BackedSystemAccountsCreatorTest {
subject.ensureSystemAccounts(backingAccounts, book);
// then:
- verify(backingAccounts).put(accountWith(2), withExpectedBalance(treasuryBalance));
+ verify(backingAccounts).put(accountWith(2), withExpectedBalance(totalBalance));
}
@Test
@@ -231,7 +224,7 @@ class BackedSystemAccountsCreatorTest {
verify(backingAccounts, never()).put(any(), any());
// and:
verify(BackedSystemAccountsCreator.log).info(String.format(
- "Ledger float is %d tinyBars in %d accounts.", 100, 4));
+ "Ledger float is %d tinyBars in %d accounts.", totalBalance, 4));
// cleanup:
BackedSystemAccountsCreator.log = LogManager.getLogger(BackedSystemAccountsCreator.class); | ['hedera-node/src/main/java/com/hedera/services/state/initialization/BackedSystemAccountsCreator.java', 'hedera-node/src/test/java/com/hedera/services/state/initialization/BackedSystemAccountsCreatorTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 5,785,082 | 1,385,655 | 163,098 | 1,026 | 791 | 185 | 14 | 1 | 337 | 18 | 96 | 4 | 1 | 0 | 1970-01-01T00:26:51 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,472 | hashgraph/hedera-services/1093/1092 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1092 | https://github.com/hashgraph/hedera-services/pull/1093 | https://github.com/hashgraph/hedera-services/pull/1093 | 1 | closes | Exported balances CSV using SingleAccountBalances type in 0.12.0-rc.1 tag | **Summary of the defect**
In `v0.11.0` token balances in the account balances CSV are Base64-encoded serializations of [the `TokenBalances` type](https://github.com/hashgraph/hedera-services/blob/06381945ab3960f7efcc9228a12bc71dad153e02/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java#L239):
```
static String b64Encode(TokenBalances tokenBalances) {
return encoder.encodeToString(tokenBalances.toByteArray());
}
```
In `master` the Base64-encoding is of the `SingleAccountBalances` [type](https://github.com/hashgraph/hedera-services/blob/master/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java#L364):
```
static String b64Encode(SingleAccountBalances accountBalances) {
return encoder.encodeToString(accountBalances.toByteArray());
}
``` | 2ceb58f0d8a3687749b5366d38197a6d1340e788 | 0394e874f0882c060013ac0e25c595e87b740b4a | https://github.com/hashgraph/hedera-services/compare/2ceb58f0d8a3687749b5366d38197a6d1340e788...0394e874f0882c060013ac0e25c595e87b740b4a | diff --git a/hedera-node/src/main/java/com/hedera/services/ServicesMain.java b/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
index 4f00558ef4..12acda2796 100644
--- a/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
+++ b/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
@@ -126,8 +126,7 @@ public class ServicesMain implements SwirldMain {
}
if (ctx.globalDynamicProperties().shouldExportBalances() && ctx.balancesExporter().isTimeToExport(when)) {
try {
- ctx.balancesExporter().toCsvFile((ServicesState) signedState, when);
- ctx.balancesExporter().toProtoFile((ServicesState) signedState, when);
+ ctx.balancesExporter().exportBalancesFrom((ServicesState) signedState, when);
} catch (IllegalStateException ise) {
log.error("HederaNode#{} has invalid total balance in signed state, exiting!", ctx.id(), ise);
systemExits.fail(1);
diff --git a/hedera-node/src/main/java/com/hedera/services/state/exports/BalancesExporter.java b/hedera-node/src/main/java/com/hedera/services/state/exports/BalancesExporter.java
index e80d73aba9..cbcde98fa9 100644
--- a/hedera-node/src/main/java/com/hedera/services/state/exports/BalancesExporter.java
+++ b/hedera-node/src/main/java/com/hedera/services/state/exports/BalancesExporter.java
@@ -26,6 +26,5 @@ import java.time.Instant;
public interface BalancesExporter {
boolean isTimeToExport(Instant now);
- void toCsvFile(ServicesState signedState, Instant when);
- void toProtoFile(ServicesState signedState, Instant when);
+ void exportBalancesFrom(ServicesState signedState, Instant when);
}
diff --git a/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java b/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
index f162b370df..e50a3e7329 100644
--- a/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
+++ b/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
@@ -9,9 +9,9 @@ package com.hedera.services.state.exports;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -34,15 +34,16 @@ import com.hederahashgraph.api.proto.java.AccountID;
import com.hedera.services.stream.proto.AllAccountBalances;
import com.hedera.services.stream.proto.SingleAccountBalances;
import com.hedera.services.stream.proto.TokenUnitBalance;
+import com.hederahashgraph.api.proto.java.TokenBalance;
+import com.hederahashgraph.api.proto.java.TokenBalances;
import com.hederahashgraph.api.proto.java.TokenID;
-import com.swirlds.common.Units;
import com.swirlds.fcmap.FCMap;
+import org.apache.commons.lang3.time.StopWatch;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.BufferedWriter;
import java.io.File;
-import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.Writer;
@@ -54,8 +55,8 @@ import java.util.ArrayList;
import java.util.Base64;
import java.util.Comparator;
import java.util.List;
+import java.util.concurrent.TimeUnit;
import java.util.function.UnaryOperator;
-import java.util.Optional;
import static com.hedera.services.state.merkle.MerkleEntityAssociation.fromAccountTokenRel;
import static com.hedera.services.state.merkle.MerkleEntityId.fromTokenId;
@@ -65,34 +66,37 @@ import static com.hedera.services.ledger.HederaLedger.ACCOUNT_ID_COMPARATOR;
public class SignedStateBalancesExporter implements BalancesExporter {
static Logger log = LogManager.getLogger(SignedStateBalancesExporter.class);
- static final String LINE_SEPARATOR = System.getProperty("line.separator");
- static final String UNKNOWN_EXPORT_DIR = "";
+ private static final String LINE_SEPARATOR = System.getProperty("line.separator");
+ private static final String UNKNOWN_EXPORT_DIR = "";
+ private static final String BAD_EXPORT_ATTEMPT_ERROR_MSG_TPL = "Could not export to '%s'!";
+ private static final String BAD_SIGNING_ATTEMPT_ERROR_MSG_TPL = "Could not sign balance file '%s'!";
static final String BAD_EXPORT_DIR_ERROR_MSG_TPL = "Cannot ensure existence of export dir '%s'!";
static final String LOW_NODE_BALANCE_WARN_MSG_TPL = "Node '%s' has unacceptably low balance %d!";
- static final String BAD_EXPORT_ATTEMPT_ERROR_MSG_TPL = "Could not export to '%s'!";
- static final String BAD_SIGNING_ATTEMPT_ERROR_MSG_TPL = "Could not sign balance file '%s'!";
static final String GOOD_SIGNING_ATTEMPT_DEBUG_MSG_TPL = "Created balance signature file '%s'.";
static final String CURRENT_VERSION = "version:2";
- static final String PROTOBUF_FILE_EXTENSION = ".pb";
- static final String CSV_FILE_EXTENSION = ".csv";
+ private static final String PROTO_FILE_EXTENSION = ".pb";
+ private static final String CSV_FILE_EXTENSION = ".csv";
static final Instant NEVER = null;
- static final Base64.Encoder encoder = Base64.getEncoder();
+ private static final Base64.Encoder encoder = Base64.getEncoder();
final long expectedFloat;
- final UnaryOperator<byte[]> signer;
- final GlobalDynamicProperties dynamicProperties;
+ private final UnaryOperator<byte[]> signer;
+ private final GlobalDynamicProperties dynamicProperties;
+ /* Used to toggle output for testing. */
+ boolean exportCsv = true, exportProto = true;
SigFileWriter sigFileWriter = new StandardSigFileWriter();
FileHashReader hashReader = new Sha384HashReader();
DirectoryAssurance directories = loc -> Files.createDirectories(Paths.get(loc));
- String lastUsedExportDir = UNKNOWN_EXPORT_DIR;
- Instant periodEnd = NEVER;
+ private String lastUsedExportDir = UNKNOWN_EXPORT_DIR;
+ private BalancesSummary summary;
+ Instant periodEnd = NEVER;
- public static final Comparator<SingleAccountBalances> SINGLE_ACCOUNT_BALANCES_COMPARATOR =
+ static final Comparator<SingleAccountBalances> SINGLE_ACCOUNT_BALANCES_COMPARATOR =
Comparator.comparing(SingleAccountBalances::getAccountID, ACCOUNT_ID_COMPARATOR);
public SignedStateBalancesExporter(
@@ -119,57 +123,53 @@ public class SignedStateBalancesExporter implements BalancesExporter {
}
@Override
- public void toCsvFile(ServicesState signedState, Instant exportTimeStamp) {
+ public void exportBalancesFrom(ServicesState signedState, Instant when) {
if (!ensureExportDir(signedState.getNodeAccountId())) {
return;
}
- long startTime = System.nanoTime();
- var summary = summarized(signedState);
+ var watch = StopWatch.createStarted();
+ summary = summarized(signedState);
var expected = BigInteger.valueOf(expectedFloat);
if (!expected.equals(summary.getTotalFloat())) {
throw new IllegalStateException(String.format(
"Signed state @ %s had total balance %d not %d!",
- exportTimeStamp,
- summary.getTotalFloat(),
- expectedFloat));
+ when, summary.getTotalFloat(), expectedFloat)); }
+ log.info("Took {}ms to summarize signed state balances", watch.getTime(TimeUnit.MILLISECONDS));
+
+ if (exportCsv) {
+ toCsvFile(when);
+ }
+ if (exportProto) {
+ toProtoFile(when);
}
+ }
+
+ private void toCsvFile(Instant exportTimeStamp) {
+ var watch = StopWatch.createStarted();
+
var csvLoc = lastUsedExportDir
+ exportTimeStamp.toString().replace(":", "_") + "_Balances" + CSV_FILE_EXTENSION;
boolean exportSucceeded = exportBalancesFile(summary, csvLoc, exportTimeStamp);
if (exportSucceeded) {
tryToSign(csvLoc);
}
- log.info("It took total {} Millisecond to export and sign the csv account files",
- (System.nanoTime() - startTime) * Units.NANOSECONDS_TO_MILLISECONDS);
- }
-
- @Override
- public void toProtoFile(ServicesState signedState, Instant exportTimeStamp) {
- if (!ensureExportDir(signedState.getNodeAccountId())) {
- return;
- }
- long startTime = System.nanoTime();
- AllAccountBalances.Builder allAccountBalancesBuilder = AllAccountBalances.newBuilder();
+ log.info(" -> Took {}ms to export and sign CSV balances file", watch.getTime(TimeUnit.MILLISECONDS));
+ }
- var expected = BigInteger.valueOf(expectedFloat);
- var total = calcTotalAndBuildProtoMessage(signedState, exportTimeStamp, allAccountBalancesBuilder);
-
- if (!expected.equals(total)) {
- throw new IllegalStateException(String.format(
- "Signed state @ %s had total balance %d not %d!",
- exportTimeStamp,total, expectedFloat));
- }
+ private void toProtoFile(Instant exportTimeStamp) {
+ var watch = StopWatch.createStarted();
+ var builder = AllAccountBalances.newBuilder();
+ summarizeAsProto(exportTimeStamp, builder);
var protoLoc = lastUsedExportDir
- + exportTimeStamp.toString().replace(":", "_") + "_Balances" + PROTOBUF_FILE_EXTENSION;
- boolean exportSucceeded = exportBalancesProtoFile(allAccountBalancesBuilder, protoLoc);
-
+ + exportTimeStamp.toString().replace(":", "_") + "_Balances" + PROTO_FILE_EXTENSION;
+ boolean exportSucceeded = exportBalancesProtoFile(builder, protoLoc);
if (exportSucceeded) {
tryToSign(protoLoc);
}
- log.info("It took total {} Millisecond to export and sign the proto account files",
- (System.nanoTime() - startTime) * Units.NANOSECONDS_TO_MILLISECONDS);
+
+ log.info(" -> Took {}ms to export and sign proto balances file", watch.getTime(TimeUnit.MILLISECONDS));
}
private void tryToSign(String csvLoc) {
@@ -192,7 +192,7 @@ public class SignedStateBalancesExporter implements BalancesExporter {
} else {
addLegacyHeader(fout, when);
}
- for (SingleAccountBalances singleAccountBalances : summary.getOrderedBalances()) {
+ for (SingleAccountBalances singleAccountBalances : summary.getOrderedBalances()) {
fout.write(String.format(
"%d,%d,%d,%d",
singleAccountBalances.getAccountID().getShardNum(),
@@ -200,10 +200,9 @@ public class SignedStateBalancesExporter implements BalancesExporter {
singleAccountBalances.getAccountID().getAccountNum(),
singleAccountBalances.getHbarBalance()));
if (dynamicProperties.shouldExportTokenBalances()) {
- if(singleAccountBalances.getTokenUnitBalancesList().size() > 0) {
+ if (singleAccountBalances.getTokenUnitBalancesList().size() > 0) {
fout.write("," + b64Encode(singleAccountBalances));
- }
- else {
+ } else {
fout.write(",");
}
}
@@ -216,62 +215,14 @@ public class SignedStateBalancesExporter implements BalancesExporter {
return true;
}
- private BigInteger calcTotalAndBuildProtoMessage(ServicesState signedState, Instant exportTimeStamp,
- AllAccountBalances.Builder allAccountBalancesBuilder) {
-
- long nodeBalanceWarnThreshold = dynamicProperties.nodeBalanceWarningThreshold();
- BigInteger totalFloat = BigInteger.valueOf(0L);
-
- var nodeIds = MiscUtils.getNodeAccounts(signedState.addressBook());
- var tokens = signedState.tokens();
- var accounts = signedState.accounts();
- var tokenAssociations = signedState.tokenAssociations();
-
- for (MerkleEntityId id : accounts.keySet()) {
- var account = accounts.get(id);
- if (!account.isDeleted()) {
- var accountId = id.toAccountId();
- var balance = account.getBalance();
- if (nodeIds.contains(accountId) && balance < nodeBalanceWarnThreshold) {
- log.warn(String.format(
- LOW_NODE_BALANCE_WARN_MSG_TPL,
- readableId(accountId),
- balance));
- }
- totalFloat = totalFloat.add(BigInteger.valueOf(account.getBalance()));
-
- SingleAccountBalances.Builder singleAccountBuilder =SingleAccountBalances.newBuilder();
- singleAccountBuilder.setAccountID(accountId)
- .setHbarBalance(balance);
-
- if (dynamicProperties.shouldExportTokenBalances()) {
- var accountTokens = account.tokens();
- if (accountTokens.numAssociations() > 0) {
- for (TokenID tokenId : accountTokens.asIds()) {
- var token = tokens.get(fromTokenId(tokenId));
- if (token != null && !token.isDeleted()) {
- var relationship = tokenAssociations
- .get(fromAccountTokenRel(accountId, tokenId));
- singleAccountBuilder.addTokenUnitBalances(tb(tokenId, relationship.getBalance()));
- }
- }
- }
- }
- Timestamp.Builder consensusTimeStamp = Timestamp.newBuilder();
- consensusTimeStamp.setSeconds(exportTimeStamp.getEpochSecond())
- .setNanos(exportTimeStamp.getNano());
- allAccountBalancesBuilder.setConsensusTimestamp(consensusTimeStamp.build());
- allAccountBalancesBuilder.addAllAccounts(singleAccountBuilder.build());
- }
- }
- return totalFloat;
+ private void summarizeAsProto(Instant exportTimeStamp, AllAccountBalances.Builder builder) {
+ builder.setConsensusTimestamp(Timestamp.newBuilder()
+ .setSeconds(exportTimeStamp.getEpochSecond())
+ .setNanos(exportTimeStamp.getNano()));
+ builder.addAllAllAccounts(summary.getOrderedBalances());
}
private boolean exportBalancesProtoFile(AllAccountBalances.Builder allAccountsBuilder, String protoLoc) {
- if(log.isDebugEnabled()) {
- log.debug("Export all accounts to protobuf file {} ", protoLoc);
- }
-
try (FileOutputStream fout = new FileOutputStream(protoLoc)) {
allAccountsBuilder.build().writeTo(fout);
} catch (IOException e) {
@@ -281,17 +232,6 @@ public class SignedStateBalancesExporter implements BalancesExporter {
return true;
}
- public Optional<AllAccountBalances> importBalanceProtoFile(String protoLoc) {
- try {
- FileInputStream fin = new FileInputStream(protoLoc);
- AllAccountBalances allAccountBalances = AllAccountBalances.parseFrom(fin);
- return Optional.ofNullable(allAccountBalances);
- } catch (IOException e) {
- log.error("Can't read protobuf message file {}", protoLoc);
- }
- return Optional.empty();
- }
-
private void addLegacyHeader(Writer writer, Instant at) throws IOException {
writer.write(String.format("TimeStamp:%s%s", at, LINE_SEPARATOR));
writer.write("shardNum,realmNum,accountNum,balance" + LINE_SEPARATOR);
@@ -312,7 +252,7 @@ public class SignedStateBalancesExporter implements BalancesExporter {
var tokens = signedState.tokens();
var accounts = signedState.accounts();
var tokenAssociations = signedState.tokenAssociations();
- for (MerkleEntityId id : accounts.keySet()) {
+ for (MerkleEntityId id : accounts.keySet()) {
var account = accounts.get(id);
if (!account.isDeleted()) {
var accountId = id.toAccountId();
@@ -328,7 +268,7 @@ public class SignedStateBalancesExporter implements BalancesExporter {
sabBuilder.setHbarBalance(balance)
.setAccountID(accountId);
if (dynamicProperties.shouldExportTokenBalances()) {
- addTokenBalances(accountId, account, sabBuilder, tokens, tokenAssociations);
+ addTokenBalances(accountId, account, sabBuilder, tokens, tokenAssociations);
}
accountBalances.add(sabBuilder.build());
}
@@ -345,14 +285,11 @@ public class SignedStateBalancesExporter implements BalancesExporter {
FCMap<MerkleEntityAssociation, MerkleTokenRelStatus> tokenAssociations
) {
var accountTokens = account.tokens();
- if (accountTokens.numAssociations() > 0) {
-
- for (TokenID tokenId : accountTokens.asIds()) {
- var token = tokens.get(fromTokenId(tokenId));
- if (token != null && !token.isDeleted()) {
- var relationship = tokenAssociations.get(fromAccountTokenRel(id, tokenId));
- sabBuilder.addTokenUnitBalances(tb(tokenId, relationship.getBalance()));
- }
+ for (TokenID tokenId : accountTokens.asIds()) {
+ var token = tokens.get(fromTokenId(tokenId));
+ if (token != null && !token.isDeleted()) {
+ var relationship = tokenAssociations.get(fromAccountTokenRel(id, tokenId));
+ sabBuilder.addTokenUnitBalances(tb(tokenId, relationship.getBalance()));
}
}
}
@@ -362,7 +299,13 @@ public class SignedStateBalancesExporter implements BalancesExporter {
}
static String b64Encode(SingleAccountBalances accountBalances) {
- return encoder.encodeToString(accountBalances.toByteArray());
+ var wrapper = TokenBalances.newBuilder();
+ for (TokenUnitBalance tokenUnitBalance : accountBalances.getTokenUnitBalancesList()) {
+ wrapper.addTokenBalances(TokenBalance.newBuilder()
+ .setTokenId(tokenUnitBalance.getTokenId())
+ .setBalance(tokenUnitBalance.getBalance()));
+ }
+ return encoder.encodeToString(wrapper.build().toByteArray());
}
private boolean ensureExportDir(AccountID node) {
diff --git a/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java b/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
index f9a97391b4..0d38db1e42 100644
--- a/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
@@ -522,8 +522,7 @@ public class ServicesMainTest {
subject.newSignedState(signedState, when, 1L);
// then:
- verify(balancesExporter, never()).toCsvFile(any(), any());
- verify(balancesExporter, never()).toProtoFile(any(), any());
+ verify(balancesExporter, never()).exportBalancesFrom(any(), any());
}
@Test
@@ -540,8 +539,7 @@ public class ServicesMainTest {
subject.newSignedState(signedState, when, 1L);
// then:
- verify(balancesExporter).toCsvFile(signedState, when);
- verify(balancesExporter).toProtoFile(signedState, when);
+ verify(balancesExporter).exportBalancesFrom(signedState, when);
}
@Test
@@ -569,7 +567,9 @@ public class ServicesMainTest {
given(globalDynamicProperties.shouldExportBalances()).willReturn(true);
given(balancesExporter.isTimeToExport(when)).willReturn(true);
- willThrow(IllegalStateException.class).given(balancesExporter).toCsvFile(signedState, when);
+ willThrow(IllegalStateException.class)
+ .given(balancesExporter)
+ .exportBalancesFrom(signedState, when);
// when:
subject.newSignedState(signedState, when, 1L);
diff --git a/hedera-node/src/test/java/com/hedera/services/sigs/factories/SigFactoryCreatorTest.java b/hedera-node/src/test/java/com/hedera/services/sigs/factories/SigFactoryCreatorTest.java
index e364453f86..64df69775e 100644
--- a/hedera-node/src/test/java/com/hedera/services/sigs/factories/SigFactoryCreatorTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/sigs/factories/SigFactoryCreatorTest.java
@@ -21,6 +21,7 @@ package com.hedera.services.sigs.factories;
*/
import com.google.protobuf.ByteString;
+import com.google.protobuf.InvalidProtocolBufferException;
import com.hedera.services.state.merkle.MerkleEntityId;
import com.hedera.services.state.merkle.MerkleSchedule;
import com.hedera.services.utils.SignedTxnAccessor;
@@ -32,6 +33,7 @@ import com.hederahashgraph.api.proto.java.ScheduleCreateTransactionBody;
import com.hederahashgraph.api.proto.java.ScheduleID;
import com.hederahashgraph.api.proto.java.ScheduleSignTransactionBody;
import com.hederahashgraph.api.proto.java.SignedTransaction;
+import com.hederahashgraph.api.proto.java.TokenBalances;
import com.hederahashgraph.api.proto.java.Transaction;
import com.hederahashgraph.api.proto.java.TransactionBody;
import com.hederahashgraph.api.proto.java.TransferList;
@@ -41,6 +43,8 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import java.util.Base64;
+
import static com.hedera.services.sigs.factories.PlatformSigFactoryTest.pk;
import static com.hedera.services.sigs.factories.PlatformSigFactoryTest.sig;
import static com.hedera.services.state.merkle.MerkleEntityId.fromScheduleId;
diff --git a/hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java b/hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java
index ea44c84f15..5f9aecfe6f 100644
--- a/hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java
@@ -20,6 +20,7 @@ package com.hedera.services.state.exports;
*
*/
+import com.google.protobuf.InvalidProtocolBufferException;
import com.hedera.services.ServicesState;
import com.hedera.services.config.MockGlobalDynamicProps;
import com.hedera.services.context.properties.GlobalDynamicProperties;
@@ -35,6 +36,7 @@ import com.hederahashgraph.api.proto.java.AccountID;
import com.hedera.services.stream.proto.AllAccountBalances;
import com.hedera.services.stream.proto.SingleAccountBalances;
import com.hedera.services.stream.proto.TokenUnitBalance;
+import com.hederahashgraph.api.proto.java.TokenBalances;
import com.hederahashgraph.api.proto.java.TokenID;
import com.swirlds.common.Address;
@@ -47,17 +49,18 @@ import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import java.io.File;
+import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.ArrayList;
-import java.util.Collections;
+import java.util.Base64;
+import java.util.Base64.Decoder;
import java.util.Comparator;
import java.util.List;
import java.util.function.UnaryOperator;
-import java.util.regex.Pattern;
import java.util.Optional;
import static com.hedera.services.state.exports.SignedStateBalancesExporter.GOOD_SIGNING_ATTEMPT_DEBUG_MSG_TPL;
@@ -93,16 +96,12 @@ class SignedStateBalancesExporterTest {
long thisNodeBalance = 400;
AccountID thisNode = asAccount("0.0.3");
-
long anotherNodeBalance = 100;
AccountID anotherNode = asAccount("0.0.4");
-
long firstNonNodeAccountBalance = 250;
AccountID firstNonNode = asAccount("0.0.1001");
-
long secondNonNodeAccountBalance = 250;
AccountID secondNonNode = asAccount("0.0.1002");
-
AccountID deleted = asAccount("0.0.1003");
TokenID theToken = asToken("0.0.1004");
@@ -212,7 +211,8 @@ class SignedStateBalancesExporterTest {
subject.directories = assurance;
// when:
- subject.toCsvFile(state, now);
+ subject.exportProto = false;
+ subject.exportBalancesFrom(state, now);
// then:
verify(mockLog).error(any(String.class), any(Throwable.class));
@@ -226,7 +226,8 @@ class SignedStateBalancesExporterTest {
given(hashReader.readHash(loc)).willThrow(IllegalStateException.class);
// when:
- subject.toCsvFile(state, now);
+ subject.exportProto = false;
+ subject.exportBalancesFrom(state, now);
// then:
verify(mockLog).error(any(String.class), any(Throwable.class));
@@ -235,12 +236,81 @@ class SignedStateBalancesExporterTest {
new File(loc).delete();
}
+ @Test
+ public void matchesV2OutputForCsv() throws IOException {
+ // setup:
+ ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+ // and:
+ var loc = expectedExportLoc();
+ // and:
+ accounts.clear();
+ accounts.put(
+ new MerkleEntityId(0, 0, 1),
+ MerkleAccountFactory.newAccount().get());
+ accounts.put(
+ new MerkleEntityId(0, 0, 2),
+ MerkleAccountFactory.newAccount()
+ .balance(4999999999999999920L)
+ .tokens(asToken("0.0.1001"), asToken("0.0.1002"))
+ .get());
+ accounts.put(
+ new MerkleEntityId(0, 0, 3),
+ MerkleAccountFactory.newAccount()
+ .balance(80L)
+ .tokens(asToken("0.0.1002"))
+ .get());
+ // and:
+ tokenRels.clear();
+ tokenRels.put(
+ new MerkleEntityAssociation(0, 0, 2, 0, 0, 1001),
+ new MerkleTokenRelStatus(666L, false, false));
+ tokenRels.put(
+ new MerkleEntityAssociation(0, 0, 2, 0, 0, 1002),
+ new MerkleTokenRelStatus(444L, false, false));
+ tokenRels.put(
+ new MerkleEntityAssociation(0, 0, 3, 0, 0, 1002),
+ new MerkleTokenRelStatus(333L, false, false));
+ // and:
+ tokens.clear();
+ tokens.put(new MerkleEntityId(0, 0, 1001), token);
+ tokens.put(new MerkleEntityId(0, 0, 1002), token);
+
+ given(hashReader.readHash(loc)).willReturn(fileHash);
+ given(sigFileWriter.writeSigFile(captor.capture(), any(), any())).willReturn(loc + "_sig");
+ given(properties.getLongProperty("ledger.totalTinyBarFloat"))
+ .willReturn(5000000000000000000L);
+ given(signer.apply(fileHash)).willReturn(sig);
+ // and:
+ subject = new SignedStateBalancesExporter(properties, signer, dynamicProperties);
+ subject.sigFileWriter = sigFileWriter;
+ subject.hashReader = hashReader;
+
+ // when:
+ subject.exportProto = false;
+ subject.exportBalancesFrom(state, now);
+
+ // then:
+ var lines = Files.readAllLines(Paths.get(loc));
+ assertEquals(6, lines.size());
+ assertEquals(String.format("# " + SignedStateBalancesExporter.CURRENT_VERSION, now), lines.get(0));
+ assertEquals(String.format("# TimeStamp:%s", now), lines.get(1));
+ assertEquals("shardNum,realmNum,accountNum,balance,tokenBalances", lines.get(2));
+ assertEquals("0,0,1,0,", lines.get(3));
+ assertEquals("0,0,2,4999999999999999920,CggKAxjpBxCaBQoICgMY6gcQvAM=", lines.get(4));
+ assertEquals("0,0,3,80,CggKAxjqBxDNAg==", lines.get(5));
+ // and:
+ verify(sigFileWriter).writeSigFile(loc, sig, fileHash);
+ // and:
+ verify(mockLog).debug(String.format(GOOD_SIGNING_ATTEMPT_DEBUG_MSG_TPL, loc + "_sig"));
+
+ // cleanup:
+ new File(loc).delete();
+ }
+
@Test
public void usesNewFormatWhenExportingTokenBalances() throws IOException {
// setup:
ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
- Pattern CSV_NAME_PATTERN = Pattern.compile(
- ".*\\\\d{4}-\\\\d{2}-\\\\d{2}T\\\\d{2}_\\\\d{2}_\\\\d{2}[.]\\\\d{9}Z_Balances.csv");
// and:
var loc = expectedExportLoc();
@@ -248,7 +318,8 @@ class SignedStateBalancesExporterTest {
given(sigFileWriter.writeSigFile(captor.capture(), any(), any())).willReturn(loc + "_sig");
// when:
- subject.toCsvFile(state, now);
+ subject.exportProto = false;
+ subject.exportBalancesFrom(state, now);
// then:
var lines = Files.readAllLines(Paths.get(loc));
@@ -271,8 +342,6 @@ class SignedStateBalancesExporterTest {
verify(sigFileWriter).writeSigFile(loc, sig, fileHash);
// and:
verify(mockLog).debug(String.format(GOOD_SIGNING_ATTEMPT_DEBUG_MSG_TPL, loc + "_sig"));
- // and:
-// assertTrue(CSV_NAME_PATTERN.matcher(captor.getValue()).matches());
// cleanup:
new File(loc).delete();
@@ -292,7 +361,8 @@ class SignedStateBalancesExporterTest {
subject.hashReader = hashReader;
// when:
- subject.toCsvFile(state, now);
+ subject.exportProto = false;
+ subject.exportBalancesFrom(state, now);
// then:
var lines = Files.readAllLines(Paths.get(expectedExportLoc()));
@@ -318,8 +388,6 @@ class SignedStateBalancesExporterTest {
public void testExportingTokenBalancesProto() throws IOException {
// setup:
ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
- Pattern PB_NAME_PATTERN = Pattern.compile(
- ".*\\\\d{4}-\\\\d{2}-\\\\d{2}T\\\\d{2}_\\\\d{2}_\\\\d{2}[.]\\\\d{9}Z_Balances.pb");
// and:
var loc = expectedExportLoc(true);
@@ -327,23 +395,23 @@ class SignedStateBalancesExporterTest {
given(sigFileWriter.writeSigFile(captor.capture(), any(), any())).willReturn(loc + "_sig");
// when:
- subject.toProtoFile(state, now);
+ subject.exportCsv = false;
+ subject.exportBalancesFrom(state, now);
// and:
- java.util.Optional<AllAccountBalances> fileContent = subject.importBalanceProtoFile(loc);
+ java.util.Optional<AllAccountBalances> fileContent = importBalanceProtoFile(loc);
- AllAccountBalances allAccountBalances = fileContent.get() ;
+ AllAccountBalances allAccountBalances = fileContent.get();
// then:
List<SingleAccountBalances> accounts = allAccountBalances.getAllAccountsList();
assertEquals(accounts.size(), 4);
- for(SingleAccountBalances account : accounts) {
- if(account.getAccountID().getAccountNum() == 1001) {
+ for (SingleAccountBalances account : accounts) {
+ if (account.getAccountID().getAccountNum() == 1001) {
assertEquals(account.getHbarBalance(), 250);
- }
- else if(account.getAccountID().getAccountNum() == 1002) {
+ } else if (account.getAccountID().getAccountNum() == 1002) {
assertEquals(account.getHbarBalance(), 250);
assertEquals(account.getTokenUnitBalances(0).getTokenId().getTokenNum(), 1004);
assertEquals(account.getTokenUnitBalances(0).getBalance(), 100);
@@ -354,8 +422,6 @@ class SignedStateBalancesExporterTest {
verify(sigFileWriter).writeSigFile(loc, sig, fileHash);
// and:
verify(mockLog).debug(String.format(GOOD_SIGNING_ATTEMPT_DEBUG_MSG_TPL, loc + "_sig"));
- // and:
-// assertTrue(CSV_NAME_PATTERN.matcher(captor.getValue()).matches());
// cleanup:
new File(loc).delete();
@@ -376,7 +442,8 @@ class SignedStateBalancesExporterTest {
subject.directories = assurance;
// when:
- subject.toProtoFile(state, now);
+ subject.exportCsv = false;
+ subject.exportBalancesFrom(state, now);
// then:
verify(mockLog).error(any(String.class), any(Throwable.class));
@@ -386,42 +453,29 @@ class SignedStateBalancesExporterTest {
@Test
public void getEmptyAllAccountBalancesFromCorruptedProtoFileImport() throws Exception {
// setup:
- ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
- Pattern BAD_PB_NAME_PATTERN = Pattern.compile(
- ".*\\\\d{4}-\\\\d{2}-\\\\d{2}T\\\\d{2}_\\\\d{2}_\\\\d{2}[.]\\\\d{9}Z_Balances.pb");
- // and:
var loc = expectedExportLoc(true);
given(hashReader.readHash(loc)).willReturn(fileHash);
// when: Pretend the .csv file is a corrupted .pb file
- subject.toCsvFile(state, now);
+ subject.exportProto = false;
+ subject.exportBalancesFrom(state, now);
// and:
- java.util.Optional<AllAccountBalances> accounts = subject.importBalanceProtoFile(loc);
+ java.util.Optional<AllAccountBalances> accounts = importBalanceProtoFile(loc);
// then:
assertEquals(Optional.empty(), accounts);
}
- @Test
- public void throwsOnUnexpectedTotalFloatForProtoFile() throws NegativeAccountBalanceException {
- // given:
- anotherNodeAccount.setBalance(anotherNodeBalance + 1);
-
- // then:
- assertThrows(IllegalStateException.class, () -> subject.toProtoFile(state, now));
- }
-
-
-
@Test
public void assuresExpectedProtoFileDir() throws IOException {
// given:
subject.directories = assurance;
// when:
- subject.toProtoFile(state, now);
+ subject.exportCsv = false;
+ subject.exportBalancesFrom(state, now);
// then:
verify(assurance).ensureExistenceOf(expectedExportDir());
@@ -449,22 +503,19 @@ class SignedStateBalancesExporterTest {
willThrow(IOException.class).given(assurance).ensureExistenceOf(any());
// when:
- subject.toProtoFile(state, now);
+ subject.exportCsv = false;
+ subject.exportBalancesFrom(state, now);
// then:
verify(mockLog).error(String.format(
SignedStateBalancesExporter.BAD_EXPORT_DIR_ERROR_MSG_TPL, expectedExportDir()));
}
- private String expectedBalancesName(Boolean isProto ) {
+ private String expectedBalancesName(Boolean isProto) {
return isProto ? now.toString().replace(":", "_") + "_Balances.pb"
: now.toString().replace(":", "_") + "_Balances.csv";
}
- private String expectedBalancesName() {
- return expectedBalancesName(false) ;
- }
-
@Test
public void testSingleAccountBalancingSort() {
// given:
@@ -534,7 +585,8 @@ class SignedStateBalancesExporterTest {
subject.directories = assurance;
// when:
- subject.toCsvFile(state, now);
+ subject.exportProto = false;
+ subject.exportBalancesFrom(state, now);
// then:
verify(assurance).ensureExistenceOf(expectedExportDir());
@@ -546,7 +598,8 @@ class SignedStateBalancesExporterTest {
anotherNodeAccount.setBalance(anotherNodeBalance + 1);
// then:
- assertThrows(IllegalStateException.class, () -> subject.toCsvFile(state, now));
+ assertThrows(IllegalStateException.class,
+ () -> subject.exportBalancesFrom(state, now));
}
@Test
@@ -557,7 +610,8 @@ class SignedStateBalancesExporterTest {
willThrow(IOException.class).given(assurance).ensureExistenceOf(any());
// when:
- subject.toCsvFile(state, now);
+ subject.exportProto = false;
+ subject.exportBalancesFrom(state, now);
// then:
verify(mockLog).error(String.format(
@@ -592,4 +646,15 @@ class SignedStateBalancesExporterTest {
.map(Path::toFile)
.forEach(File::delete);
}
+
+ static Optional<AllAccountBalances> importBalanceProtoFile(String protoLoc) {
+ try {
+ FileInputStream fin = new FileInputStream(protoLoc);
+ AllAccountBalances allAccountBalances = AllAccountBalances.parseFrom(fin);
+ return Optional.ofNullable(allAccountBalances);
+ } catch (IOException e) {
+ SignedStateBalancesExporter.log.error("Can't read protobuf message file {}", protoLoc);
+ }
+ return Optional.empty();
+ }
}
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenMiscOps.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenMiscOps.java
index 4e7f49c7bf..e71b01cc1a 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenMiscOps.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenMiscOps.java
@@ -21,6 +21,7 @@ package com.hedera.services.bdd.suites.token;
*/
import com.hedera.services.bdd.spec.HapiApiSpec;
+import com.hedera.services.bdd.spec.transactions.token.TokenMovement;
import com.hedera.services.bdd.suites.HapiApiSuite;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -38,6 +39,7 @@ import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate;
+import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed;
public class TokenMiscOps extends HapiApiSuite {
@@ -52,12 +54,38 @@ public class TokenMiscOps extends HapiApiSuite {
return allOf(
List.of(new HapiApiSpec[] {
// wellKnownAccountsHaveTokens(),
- someInfoQueries(),
+// someLowNumAccountsHaveTokens(),
+// someInfoQueries(),
+ theCreation(),
}
)
);
}
+ public HapiApiSpec someLowNumAccountsHaveTokens() {
+ long aSupply = 666L, bSupply = 777L;
+
+ return defaultHapiSpec("SomeLowNumAccountsHaveTokens")
+ .given(
+ tokenCreate("first").treasury(GENESIS).initialSupply(aSupply),
+ tokenCreate("second").treasury(GENESIS).initialSupply(bSupply)
+ ).when(
+ tokenAssociate("0.0.3", "second").signedBy(GENESIS),
+ cryptoTransfer(moving(aSupply / 2, "second")
+ .between(GENESIS, "0.0.3")).signedBy(GENESIS)
+ ).then(
+ getAccountInfo(GENESIS).logged(),
+ getAccountInfo("0.0.3").logged()
+ );
+ }
+
+ public HapiApiSpec theCreation() {
+ return defaultHapiSpec("TheCreation")
+ .given().when().then(
+ cryptoCreate("adam")
+ );
+ }
+
public HapiApiSpec someInfoQueries() {
return defaultHapiSpec("SomeInfoQueries")
.given().when().then( | ['hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java', 'hedera-node/src/main/java/com/hedera/services/ServicesMain.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenMiscOps.java', 'hedera-node/src/main/java/com/hedera/services/state/exports/BalancesExporter.java', 'hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java', 'hedera-node/src/test/java/com/hedera/services/sigs/factories/SigFactoryCreatorTest.java', 'hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java'] | {'.java': 7} | 7 | 7 | 0 | 0 | 7 | 5,979,638 | 1,433,378 | 168,645 | 1,051 | 10,522 | 2,350 | 231 | 4 | 850 | 49 | 214 | 14 | 2 | 2 | 1970-01-01T00:26:53 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,471 | hashgraph/hedera-services/1098/1096 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1096 | https://github.com/hashgraph/hedera-services/pull/1098 | https://github.com/hashgraph/hedera-services/pull/1098 | 1 | closes | HTS-Restart-Performance-Random-10k-61m failed with Unhandled error while processing :: signedTransactionBytes | <!-- Thanks for submitting a bug report! Before submitting:
1. Try searching the existing issues to see if your issue has already been reported
2. If you're reporting a security vulnerability, please email security@hedera.com instead of opening an issue
-->
**Summary of the defect**
HTS-Restart-Performance-Random-10k-61m failed during TokenAssociateTransitionLogic.
**How to reproduce (if possible)**
Run test `HTS-Restart-Performance-Random-10k-61m`.
**Service logs (if applicable)**
```
2021-02-08 05:58:07.178 WARN 70 TokenAssociateTransitionLogic - Unhandled error while processing :: signedTransactionBytes: "\\nS\\n\\027\\n\\021\\b\\262\\245\\203\\201\\006\\020\\244\\373\\377\\377\\377\\377\\377\\377\\377\\001\\022\\002\\030\\002\\022\\002\\030\\003\\030\\200\\310\\257\\240%\\"\\002\\bx2 \\303\\203\\302\\256\\303\\202\\302\\267\\303\\203\\302\\271tF8\\303\\202\\302\\256J\\303\\203\\302\\213\\303\\203\\302\\220\\303\\203\\302\\216\\302\\002\\a\\n\\003\\030\\350U\\022\\000\\022G\\nE\\n\\001\\n\\032@\\331\\332\\353\\315T\\3452\\235\\0300\\202\\331Hn\\200w>\\317%\\211\\017S\\270\\374\\312\\322\\3464:\\226r\\371\\\\\\264\\246\\363\\351\\341\\375\\020@\\027\\372\\332.:\\226\\317\\260\\324\\317\\022K\\v\\a\\377n\\177X\\252\\323\\317\\337\\v"
!
java.lang.NullPointerException: null
at com.hedera.services.store.tokens.HederaTokenStore.fullySanityChecked(HederaTokenStore.java:616) ~[?:?]
at com.hedera.services.store.tokens.HederaTokenStore.associate(HederaTokenStore.java:150) ~[?:?]
at com.hedera.services.txns.token.TokenAssociateTransitionLogic.doStateTransition(TokenAssociateTransitionLogic.java:67) ~[?:?]
at com.hedera.services.legacy.services.state.AwareProcessLogic.doProcess(AwareProcessLogic.java:231) ~[?:?]
at com.hedera.services.legacy.services.state.AwareProcessLogic.processTxnInCtx(AwareProcessLogic.java:137) ~[?:?]
at com.hedera.services.state.logic.ServicesTxnManager.process(ServicesTxnManager.java:59) ~[?:?]
at com.hedera.services.legacy.services.state.AwareProcessLogic.incorporateConsensusTxn(AwareProcessLogic.java:107) ~[?:?]
at com.hedera.services.ServicesState.handleTransaction(ServicesState.java:307) ~[?:?]
at com.swirlds.platform.EventFlow.handleTransaction(EventFlow.java:731) ~[swirlds-platform-core-0.9.0-alpha.1.jar:?]
at com.swirlds.platform.EventFlow.takeHandlePut(EventFlow.java:1259) ~[swirlds-platform-core-0.9.0-alpha.1.jar:?]
at com.swirlds.platform.EventFlow.doCons(EventFlow.java:800) ~[swirlds-platform-core-0.9.0-alpha.1.jar:?]
at com.swirlds.common.threading.StoppableThread.run(StoppableThread.java:248) ~[swirlds-common-0.9.0-alpha.1.jar:?]
at java.lang.Thread.run(Thread.java:835) [?:?]
```
These are Warning Logs and should be ignored in `HederaNodeValidator`
Also Error logs `ERROR 158 HapiTxnOp - signedTransactionBytes:` mentioned in `HAPIClientValidator` are from `regression.log` and all tests client logs passed successfully.
Slack link : https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1612767653364100 | 0b92ce1301536e9f84f0222e7c94985a8ef9027f | 781a28f3bc334e4407ed7414f4a56180e4daf897 | https://github.com/hashgraph/hedera-services/compare/0b92ce1301536e9f84f0222e7c94985a8ef9027f...781a28f3bc334e4407ed7414f4a56180e4daf897 | diff --git a/hedera-node/src/main/java/com/hedera/services/store/schedule/HederaScheduleStore.java b/hedera-node/src/main/java/com/hedera/services/store/schedule/HederaScheduleStore.java
index 5e6806ed5c..adeebe7a25 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/schedule/HederaScheduleStore.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/schedule/HederaScheduleStore.java
@@ -96,7 +96,7 @@ public class HederaScheduleStore extends HederaStore implements ScheduleStore {
@Override
public boolean exists(ScheduleID id) {
- return pendingId.equals(id) || schedules.get().containsKey(fromScheduleId(id));
+ return (isCreationPending() && pendingId.equals(id)) || schedules.get().containsKey(fromScheduleId(id));
}
@Override
diff --git a/hedera-node/src/main/java/com/hedera/services/store/tokens/HederaTokenStore.java b/hedera-node/src/main/java/com/hedera/services/store/tokens/HederaTokenStore.java
index 467eee55fc..ef7cb80ce7 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/tokens/HederaTokenStore.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/tokens/HederaTokenStore.java
@@ -234,7 +234,7 @@ public class HederaTokenStore extends HederaStore implements TokenStore {
@Override
public boolean exists(TokenID id) {
- return pendingId.equals(id) || tokens.get().containsKey(fromTokenId(id));
+ return (isCreationPending() && pendingId.equals(id)) || tokens.get().containsKey(fromTokenId(id));
}
@Override
diff --git a/hedera-node/src/test/java/com/hedera/services/store/schedule/HederaScheduleStoreTest.java b/hedera-node/src/test/java/com/hedera/services/store/schedule/HederaScheduleStoreTest.java
index 2a77c9fd99..9dd6367c37 100644
--- a/hedera-node/src/test/java/com/hedera/services/store/schedule/HederaScheduleStoreTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/store/schedule/HederaScheduleStoreTest.java
@@ -31,6 +31,7 @@ import com.hedera.services.state.merkle.MerkleEntityId;
import com.hedera.services.state.merkle.MerkleSchedule;
import com.hedera.services.state.submerkle.EntityId;
import com.hedera.services.state.submerkle.RichInstant;
+import com.hedera.services.store.tokens.HederaTokenStore;
import com.hedera.test.utils.IdUtils;
import com.hedera.test.utils.TxnUtils;
import com.hederahashgraph.api.proto.java.AccountID;
@@ -325,6 +326,12 @@ public class HederaScheduleStoreTest {
assertSame(schedule, subject.get(created));
}
+ @Test
+ public void existenceCheckUnderstandsPendingIdOnlyAppliesIfCreationPending() {
+ // expect:
+ assertFalse(subject.exists(HederaScheduleStore.NO_PENDING_ID));
+ }
+
@Test
public void rejectsCreateProvisionallyMissingPayer() {
// given:
diff --git a/hedera-node/src/test/java/com/hedera/services/store/tokens/HederaTokenStoreTest.java b/hedera-node/src/test/java/com/hedera/services/store/tokens/HederaTokenStoreTest.java
index 6445ee25be..8e19755618 100644
--- a/hedera-node/src/test/java/com/hedera/services/store/tokens/HederaTokenStoreTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/store/tokens/HederaTokenStoreTest.java
@@ -361,6 +361,12 @@ class HederaTokenStoreTest {
assertSame(token, subject.get(pending));
}
+ @Test
+ public void existenceCheckUnderstandsPendingIdOnlyAppliesIfCreationPending() {
+ // expect:
+ assertFalse(subject.exists(HederaTokenStore.NO_PENDING_ID));
+ }
+
@Test
public void existenceCheckIncludesPending() {
// setup:
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java
index f33ddf2c49..f62e96cab8 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java
@@ -127,7 +127,6 @@ public class HapiGetScheduleInfo extends HapiQueryOp<HapiGetScheduleInfo> {
"Wrong schedule expiry!",
spec.registry());
-
var registry = spec.registry();
expectedSignatories.ifPresent(s -> {
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java
index d137acf786..612e7761d9 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java
@@ -202,17 +202,18 @@ public abstract class HapiTxnOp<T extends HapiTxnOp<T>> extends HapiSpecOperatio
// "{} {} Wrong actual precheck status {}, not one of {}!",spec.logPrefix(), this,
// actualPrecheck,
// permissiblePrechecks.get());
- throw new HapiTxnPrecheckStateException(
- String.format("Wrong actual precheck status %s, expected %s", actualStatus,
- permissibleStatuses.get()));
+ throw new HapiTxnPrecheckStateException(String.format(
+ "Wrong precheck status! Expected one of %s, actual %s",
+ permissibleStatuses.get(), actualStatus));
}
} else {
if (getExpectedPrecheck() != actualPrecheck) {
// Change to an info until HapiClientValidator can be modified and can understand new errors
log.info("{} {} Wrong actual precheck status {}, expecting {}", spec.logPrefix(), this,
actualPrecheck, getExpectedPrecheck());
-// throw new HapiTxnPrecheckStateException(String.format("Wrong precheck status! expected %s, actual
-// %s", getExpectedPrecheck(), actualPrecheck));
+ throw new HapiTxnPrecheckStateException(String.format(
+ "Wrong precheck status! Expected %s, actual %s",
+ getExpectedPrecheck(), actualPrecheck));
}
}
}
@@ -262,16 +263,18 @@ public abstract class HapiTxnOp<T extends HapiTxnOp<T>> extends HapiSpecOperatio
"{} {} Wrong actual status {}, not one of {}!", spec.logPrefix(), this,
actualStatus,
permissibleStatuses.get());
- throw new HapiTxnCheckStateException(
- String.format("Wrong actual status %s, expected %s", actualStatus, permissibleStatuses.get()));
+ throw new HapiTxnCheckStateException(String.format(
+ "Wrong status! Expected one of %s, was %s",
+ permissibleStatuses.get(), actualStatus));
}
} else {
if (getExpectedStatus() != actualStatus) {
// Change to an info until HapiClientValidator can be modified and can understand new errors
log.info("{} {} Wrong actual status {}, expected {}", spec.logPrefix(), this, actualStatus,
getExpectedStatus());
-// throw new HapiTxnCheckStateException(String.format("Wrong actual status %s, expected %s", actualStatus,
-// getExpectedStatus()));
+ throw new HapiTxnCheckStateException(String.format(
+ "Wrong status! Expected %s, was %s",
+ getExpectedStatus(), actualStatus));
}
}
if (!deferStatusResolution) {
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleCreateSpecs.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleCreateSpecs.java
index ff5b52cdaf..abdd2612be 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleCreateSpecs.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleCreateSpecs.java
@@ -62,6 +62,7 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.saveExpirations;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sleepFor;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_ID;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SCHEDULE_ID;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MEMO_TOO_LONG;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SOME_SIGNATURES_WERE_INVALID;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.UNPARSEABLE_SCHEDULED_TRANSACTION;
@@ -116,6 +117,7 @@ public class ScheduleCreateSpecs extends HapiApiSuite {
allowsDoublingScheduledCreates(),
scheduledTXCreatedAfterPreviousIdenticalIsExecuted(),
preservesRevocationServiceSemanticsForFileDelete(),
+ worksAsExpectedWithDefaultScheduleId(),
suiteCleanup(),
});
}
@@ -134,6 +136,13 @@ public class ScheduleCreateSpecs extends HapiApiSuite {
);
}
+ private HapiApiSpec worksAsExpectedWithDefaultScheduleId() {
+ return defaultHapiSpec("WorksAsExpectedWithDefaultScheduleId")
+ .given( ).when( ).then(
+ getScheduleInfo("0.0.0").hasCostAnswerPrecheck(INVALID_SCHEDULE_ID)
+ );
+ }
+
private HapiApiSpec bodyOnlyCreation() {
return defaultHapiSpec("BodyOnlyCreation")
.given( ).when(
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleDeleteSpecs.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleDeleteSpecs.java
index ae689bded0..ccd0df1425 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleDeleteSpecs.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleDeleteSpecs.java
@@ -167,6 +167,8 @@ public class ScheduleDeleteSpecs extends HapiApiSuite {
return defaultHapiSpec("DeletingNonExistingFails")
.given().when().then(
scheduleDelete("0.0.534")
+ .hasKnownStatus(INVALID_SCHEDULE_ID),
+ scheduleDelete("0.0.0")
.hasKnownStatus(INVALID_SCHEDULE_ID)
);
}
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java
index 1b65e79978..fc15d788cc 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java
@@ -101,10 +101,19 @@ public class TokenAssociationSpecs extends HapiApiSuite {
expiredAndDeletedTokensStillAppearInContractInfo(),
dissociationFromExpiredTokensAsExpected(),
accountInfoQueriesAsExpected(),
+ handlesUseOfDefaultTokenId(),
}
);
}
+ public HapiApiSpec handlesUseOfDefaultTokenId() {
+ return defaultHapiSpec("HandlesUseOfDefaultTokenId")
+ .given( ).when( ).then(
+ tokenAssociate(DEFAULT_PAYER, "0.0.0")
+ .hasKnownStatus(INVALID_TOKEN_ID)
+ );
+ }
+
public HapiApiSpec associatedContractsMustHaveAdminKeys() {
String misc = "someToken";
String contract = "defaultContract";
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenCreateSpecs.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenCreateSpecs.java
index e70dc04bbb..78a30ffc7a 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenCreateSpecs.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenCreateSpecs.java
@@ -38,6 +38,7 @@ import java.util.stream.IntStream;
import static com.hedera.services.bdd.spec.HapiApiSpec.defaultHapiSpec;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo;
+import static com.hedera.services.bdd.spec.queries.QueryVerbs.getScheduleInfo;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate;
@@ -80,10 +81,19 @@ public class TokenCreateSpecs extends HapiApiSuite {
creationSetsCorrectExpiry(),
creationHappyPath(),
numAccountsAllowedIsDynamic(),
+ worksAsExpectedWithDefaultTokenId(),
}
);
}
+
+ private HapiApiSpec worksAsExpectedWithDefaultTokenId() {
+ return defaultHapiSpec("WorksAsExpectedWithDefaultTokenId")
+ .given().when().then(
+ getTokenInfo("0.0.0").hasCostAnswerPrecheck(INVALID_TOKEN_ID)
+ );
+ }
+
public HapiApiSpec autoRenewValidationWorks() {
return defaultHapiSpec("AutoRenewValidationWorks")
.given(
@@ -334,7 +344,7 @@ public class TokenCreateSpecs extends HapiApiSuite {
.given(
cryptoCreate(TOKEN_TREASURY).balance(0L),
recordSystemProperty("tokens.maxSymbolUtf8Bytes", Integer::parseInt, maxUtf8Bytes::set)
- ).when( ).then(
+ ).when().then(
tokenCreate("missingSymbol")
.symbol("")
.hasPrecheck(MISSING_TOKEN_SYMBOL),
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenDeleteSpecs.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenDeleteSpecs.java
index 94615abfe0..dda82f2953 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenDeleteSpecs.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenDeleteSpecs.java
@@ -60,8 +60,8 @@ public class TokenDeleteSpecs extends HapiApiSuite {
deletionValidatesMissingAdminKey(),
deletionWorksAsExpected(),
deletionValidatesAlreadyDeletedToken(),
- deletionValidatesRef(),
treasuryBecomesDeletableAfterTokenDelete(),
+ deletionValidatesRef(),
}
);
}
@@ -178,11 +178,11 @@ public class TokenDeleteSpecs extends HapiApiSuite {
.given(
cryptoCreate("payer")
).when().then(
- tokenDelete("1.2.3")
+ tokenDelete("0.0.0")
.payingWith("payer")
.signedBy("payer")
.hasKnownStatus(INVALID_TOKEN_ID),
- tokenDelete("0.0.0")
+ tokenDelete("1.2.3")
.payingWith("payer")
.signedBy("payer")
.hasKnownStatus(INVALID_TOKEN_ID)
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenTransactSpecs.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenTransactSpecs.java
index 297326f91a..0be2b76530 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenTransactSpecs.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenTransactSpecs.java
@@ -49,7 +49,9 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.EMPTY_TOKEN_TR
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_ACCOUNT_BALANCE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TOKEN_BALANCE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_AMOUNTS;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_ID;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN;
@@ -80,6 +82,7 @@ public class TokenTransactSpecs extends HapiApiSuite {
nonZeroTransfersRejected(),
prechecksWork(),
allRequiredSigsAreChecked(),
+ missingEntitiesRejected(),
}
);
}
@@ -142,6 +145,22 @@ public class TokenTransactSpecs extends HapiApiSuite {
);
}
+ public HapiApiSpec missingEntitiesRejected() {
+ return defaultHapiSpec("MissingTokensRejected")
+ .given(
+ tokenCreate("some").treasury(DEFAULT_PAYER)
+ ).when().then(
+ cryptoTransfer(
+ moving(1L, "some")
+ .between(DEFAULT_PAYER, "0.0.0")
+ ).signedBy(DEFAULT_PAYER).hasKnownStatus(INVALID_ACCOUNT_ID),
+ cryptoTransfer(
+ moving(100_000_000_000_000L, "0.0.0")
+ .between(DEFAULT_PAYER, FUNDING)
+ ).signedBy(DEFAULT_PAYER).hasKnownStatus(INVALID_TOKEN_ID)
+ );
+ }
+
public HapiApiSpec balancesAreChecked() {
return defaultHapiSpec("BalancesAreChecked")
.given(
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenUpdateSpecs.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenUpdateSpecs.java
index 7502512189..cff1675a9d 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenUpdateSpecs.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenUpdateSpecs.java
@@ -75,7 +75,6 @@ public class TokenUpdateSpecs extends HapiApiSuite {
nameChanges(),
keysChange(),
validatesAlreadyDeletedToken(),
- validatesMissingRef(),
treasuryEvolves(),
deletedAutoRenewAccountCheckHolds(),
renewalPeriodCheckHolds(),
@@ -85,6 +84,7 @@ public class TokenUpdateSpecs extends HapiApiSuite {
tokensCanBeMadeImmutableWithEmptyKeyList(),
updateHappyPath(),
validatesMissingAdminKey(),
+ validatesMissingRef(),
}
);
}
@@ -148,6 +148,10 @@ public class TokenUpdateSpecs extends HapiApiSuite {
.given(
cryptoCreate("payer")
).when().then(
+ tokenUpdate("0.0.0")
+ .payingWith("payer")
+ .signedBy("payer")
+ .hasKnownStatus(INVALID_TOKEN_ID),
tokenUpdate("1.2.3")
.payingWith("payer")
.signedBy("payer") | ['test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleCreateSpecs.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java', 'hedera-node/src/test/java/com/hedera/services/store/schedule/HederaScheduleStoreTest.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenCreateSpecs.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleDeleteSpecs.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenTransactSpecs.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenUpdateSpecs.java', 'hedera-node/src/main/java/com/hedera/services/store/schedule/HederaScheduleStore.java', 'hedera-node/src/main/java/com/hedera/services/store/tokens/HederaTokenStore.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenDeleteSpecs.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java', 'hedera-node/src/test/java/com/hedera/services/store/tokens/HederaTokenStoreTest.java'] | {'.java': 13} | 13 | 13 | 0 | 0 | 13 | 5,978,433 | 1,433,153 | 168,614 | 1,051 | 3,871 | 975 | 89 | 11 | 2,921 | 153 | 913 | 38 | 1 | 1 | 1970-01-01T00:26:53 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,481 | hashgraph/hedera-services/734/733 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/733 | https://github.com/hashgraph/hedera-services/pull/734 | https://github.com/hashgraph/hedera-services/pull/734 | 1 | closes | avgHdlSubMsgSize stat is not updated after the stat refactoring | **Summary of the defect**
There was a copy-paste issue on lines 61 and 66
https://github.com/hashgraph/hedera-services/blob/d275a47e518b31a30c0689f27acc28d0e4f44479/hedera-node/src/main/java/com/hedera/services/stats/MiscRunningAvgs.java#L57-L66
Also the following function is not hooked up to update the avgHdlSubMsgSize stat when handling submit message transactions
https://github.com/hashgraph/hedera-services/blob/d275a47e518b31a30c0689f27acc28d0e4f44479/hedera-node/src/main/java/com/hedera/services/stats/MiscRunningAvgs.java#L81-L83
**How to reproduce (if possible)**
Run any EET with submit message transactions, the avgHdlSubMsgSize stat is not updated in the csv file(s). | d275a47e518b31a30c0689f27acc28d0e4f44479 | c55914635a01f503d4f8d91bc4f6c16e9e43f628 | https://github.com/hashgraph/hedera-services/compare/d275a47e518b31a30c0689f27acc28d0e4f44479...c55914635a01f503d4f8d91bc4f6c16e9e43f628 | diff --git a/hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java b/hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java
index 142a9de7ac..9b6277b941 100644
--- a/hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java
+++ b/hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java
@@ -470,7 +470,7 @@ public class ServicesContext {
public HapiOpCounters opCounters() {
if (opCounters == null) {
- opCounters = new HapiOpCounters(new CounterFactory() {}, MiscUtils::baseStatNameOf);
+ opCounters = new HapiOpCounters(new CounterFactory() {}, runningAvgs(), txnCtx(), MiscUtils::baseStatNameOf);
}
return opCounters;
}
diff --git a/hedera-node/src/main/java/com/hedera/services/stats/HapiOpCounters.java b/hedera-node/src/main/java/com/hedera/services/stats/HapiOpCounters.java
index aae19ab5e3..c8a1a843f2 100644
--- a/hedera-node/src/main/java/com/hedera/services/stats/HapiOpCounters.java
+++ b/hedera-node/src/main/java/com/hedera/services/stats/HapiOpCounters.java
@@ -20,6 +20,7 @@ package com.hedera.services.stats;
*
*/
+import com.hedera.services.context.TransactionContext;
import com.hederahashgraph.api.proto.java.HederaFunctionality;
import com.swirlds.common.Platform;
@@ -40,11 +41,14 @@ import static com.hedera.services.stats.ServicesStatsConfig.COUNTER_RECEIVED_NAM
import static com.hedera.services.stats.ServicesStatsConfig.COUNTER_SUBMITTED_DESC_TPL;
import static com.hedera.services.stats.ServicesStatsConfig.COUNTER_SUBMITTED_NAME_TPL;
import static com.hedera.services.utils.MiscUtils.QUERY_FUNCTIONS;
+import static com.hederahashgraph.api.proto.java.HederaFunctionality.ConsensusSubmitMessage;
public class HapiOpCounters {
static Supplier<HederaFunctionality[]> allFunctions = HederaFunctionality.class::getEnumConstants;
private final CounterFactory counter;
+ private final MiscRunningAvgs runningAvgs;
+ private final TransactionContext txnCtx;
private final Function<HederaFunctionality, String> statNameFn;
EnumMap<HederaFunctionality, AtomicLong> receivedOps = new EnumMap<>(HederaFunctionality.class);
@@ -52,9 +56,16 @@ public class HapiOpCounters {
EnumMap<HederaFunctionality, AtomicLong> submittedTxns = new EnumMap<>(HederaFunctionality.class);
EnumMap<HederaFunctionality, AtomicLong> answeredQueries = new EnumMap<>(HederaFunctionality.class);
- public HapiOpCounters(CounterFactory counter, Function<HederaFunctionality, String> statNameFn) {
+ public HapiOpCounters(
+ CounterFactory counter,
+ MiscRunningAvgs runningAvgs,
+ TransactionContext txnCtx,
+ Function<HederaFunctionality, String> statNameFn
+ ) {
+ this.txnCtx = txnCtx;
this.counter = counter;
this.statNameFn = statNameFn;
+ this.runningAvgs = runningAvgs;
Arrays.stream(allFunctions.get())
.filter(function -> !IGNORED_FUNCTIONS.contains(function))
@@ -108,6 +119,10 @@ public class HapiOpCounters {
public void countHandled(HederaFunctionality txn) {
safeIncrement(handledTxns, txn);
+ if (txn == ConsensusSubmitMessage) {
+ int txnBytes = txnCtx.accessor().getTxn().getSerializedSize();
+ runningAvgs.recordHandledSubmitMessageSize(txnBytes);
+ }
}
public long handledSoFar(HederaFunctionality txn) {
diff --git a/hedera-node/src/main/java/com/hedera/services/stats/MiscRunningAvgs.java b/hedera-node/src/main/java/com/hedera/services/stats/MiscRunningAvgs.java
index 81443d939f..5d1d6a2f55 100644
--- a/hedera-node/src/main/java/com/hedera/services/stats/MiscRunningAvgs.java
+++ b/hedera-node/src/main/java/com/hedera/services/stats/MiscRunningAvgs.java
@@ -58,7 +58,7 @@ public class MiscRunningAvgs {
runningAvg.from(
Names.RECORD_STREAM_QUEUE_SIZE,
Descriptions.RECORD_STREAM_QUEUE_SIZE,
- handledSubmitMessageSize));
+ recordStreamQueueSize));
platform.addAppStatEntry(
runningAvg.from(
Names.HANDLED_SUBMIT_MESSAGE_SIZE,
diff --git a/hedera-node/src/test/java/com/hedera/services/stats/HapiOpCountersTest.java b/hedera-node/src/test/java/com/hedera/services/stats/HapiOpCountersTest.java
index cb0a53f6a7..6f622182a7 100644
--- a/hedera-node/src/test/java/com/hedera/services/stats/HapiOpCountersTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/stats/HapiOpCountersTest.java
@@ -20,7 +20,10 @@ package com.hedera.services.stats;
*
*/
+import com.hedera.services.context.TransactionContext;
+import com.hedera.services.utils.PlatformTxnAccessor;
import com.hederahashgraph.api.proto.java.HederaFunctionality;
+import com.hederahashgraph.api.proto.java.TransactionBody;
import com.swirlds.common.Platform;
import com.swirlds.common.StatEntry;
import org.junit.jupiter.api.AfterEach;
@@ -31,6 +34,7 @@ import org.junit.runner.RunWith;
import java.util.function.Function;
+import static com.hederahashgraph.api.proto.java.HederaFunctionality.ConsensusSubmitMessage;
import static com.hederahashgraph.api.proto.java.HederaFunctionality.CryptoTransfer;
import static com.hederahashgraph.api.proto.java.HederaFunctionality.NONE;
import static com.hederahashgraph.api.proto.java.HederaFunctionality.TokenGetInfo;
@@ -43,11 +47,14 @@ import static org.mockito.BDDMockito.argThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.verify;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
@RunWith(JUnitPlatform.class)
class HapiOpCountersTest {
Platform platform;
CounterFactory factory;
+ MiscRunningAvgs runningAvgs;
+ TransactionContext txnCtx;
Function<HederaFunctionality, String> statNameFn;
HapiOpCounters subject;
@@ -57,14 +64,17 @@ class HapiOpCountersTest {
HapiOpCounters.allFunctions = () -> new HederaFunctionality[] {
CryptoTransfer,
TokenGetInfo,
+ ConsensusSubmitMessage,
NONE
};
+ txnCtx = mock(TransactionContext.class);
platform = mock(Platform.class);
factory = mock(CounterFactory.class);
statNameFn = HederaFunctionality::toString;
+ runningAvgs = mock(MiscRunningAvgs.class);
- subject = new HapiOpCounters(factory, statNameFn);
+ subject = new HapiOpCounters(factory, runningAvgs, txnCtx, statNameFn);
}
@AfterEach
@@ -147,6 +157,43 @@ class HapiOpCountersTest {
verify(platform).addAppStatEntry(tokenInfoAns);
}
+ @Test
+ public void updatesAvgSubmitMessageHdlSizeForHandled() {
+ // setup:
+ int expectedSize = 12345;
+ TransactionBody txn = mock(TransactionBody.class);
+ PlatformTxnAccessor accessor = mock(PlatformTxnAccessor.class);
+
+ given(txn.getSerializedSize()).willReturn(expectedSize);
+ given(accessor.getTxn()).willReturn(txn);
+ given(txnCtx.accessor()).willReturn(accessor);
+
+ // when:
+ subject.countHandled(ConsensusSubmitMessage);
+
+ // then
+ verify(runningAvgs).recordHandledSubmitMessageSize(expectedSize);
+ }
+
+ @Test
+ public void doesntUpdateAvgSubmitMessageHdlSizeForCountReceivedOrSubmitted() {
+ // setup:
+ int expectedSize = 12345;
+ TransactionBody txn = mock(TransactionBody.class);
+ PlatformTxnAccessor accessor = mock(PlatformTxnAccessor.class);
+
+ given(txn.getSerializedSize()).willReturn(expectedSize);
+ given(accessor.getTxn()).willReturn(txn);
+ given(txnCtx.accessor()).willReturn(accessor);
+
+ // when:
+ subject.countReceived(ConsensusSubmitMessage);
+ subject.countSubmitted(ConsensusSubmitMessage);
+
+ // then
+ verify(runningAvgs, never()).recordHandledSubmitMessageSize(expectedSize);
+ }
+
@Test
public void updatesExpectedEntries() {
// when:
diff --git a/hedera-node/src/test/java/com/hedera/services/stats/MiscRunningAvgsTest.java b/hedera-node/src/test/java/com/hedera/services/stats/MiscRunningAvgsTest.java
index a668059268..ddccae0ad3 100644
--- a/hedera-node/src/test/java/com/hedera/services/stats/MiscRunningAvgsTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/stats/MiscRunningAvgsTest.java
@@ -65,19 +65,19 @@ class MiscRunningAvgsTest {
given(factory.from(
argThat(MiscRunningAvgs.Names.ACCOUNT_LOOKUP_RETRIES::equals),
argThat(MiscRunningAvgs.Descriptions.ACCOUNT_LOOKUP_RETRIES::equals),
- any())).willReturn(retries);
+ argThat(subject.accountLookupRetries::equals))).willReturn(retries);
given(factory.from(
argThat(MiscRunningAvgs.Names.ACCOUNT_RETRY_WAIT_MS::equals),
argThat(MiscRunningAvgs.Descriptions.ACCOUNT_RETRY_WAIT_MS::equals),
- any())).willReturn(waitMs);
+ argThat(subject.accountRetryWaitMs::equals))).willReturn(waitMs);
given(factory.from(
argThat(MiscRunningAvgs.Names.RECORD_STREAM_QUEUE_SIZE::equals),
argThat(MiscRunningAvgs.Descriptions.RECORD_STREAM_QUEUE_SIZE::equals),
- any())).willReturn(queueSizes);
+ argThat(subject.recordStreamQueueSize::equals))).willReturn(queueSizes);
given(factory.from(
argThat(MiscRunningAvgs.Names.HANDLED_SUBMIT_MESSAGE_SIZE::equals),
argThat(MiscRunningAvgs.Descriptions.HANDLED_SUBMIT_MESSAGE_SIZE::equals),
- any())).willReturn(submitSizes);
+ argThat(subject.handledSubmitMessageSize::equals))).willReturn(submitSizes);
// when:
subject.registerWith(platform); | ['hedera-node/src/main/java/com/hedera/services/stats/HapiOpCounters.java', 'hedera-node/src/main/java/com/hedera/services/stats/MiscRunningAvgs.java', 'hedera-node/src/test/java/com/hedera/services/stats/HapiOpCountersTest.java', 'hedera-node/src/test/java/com/hedera/services/stats/MiscRunningAvgsTest.java', 'hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 7,049,231 | 1,645,681 | 189,034 | 1,008 | 1,011 | 254 | 21 | 3 | 690 | 56 | 200 | 8 | 2 | 0 | 1970-01-01T00:26:44 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,488 | hashgraph/hedera-services/555/553 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/553 | https://github.com/hashgraph/hedera-services/pull/555 | https://github.com/hashgraph/hedera-services/pull/555 | 1 | closes | Contract memo size limit not enforced | Created by [185](https://github.com/swirlds/hedera-fpcomplete-audit/pull/185)
## Situation
Both the [ContractCreate](https://docs.hedera.com/guides/docs/hedera-api/smart-contracts/contractcreate) and the [ContractUpdate](https://docs.hedera.com/guides/docs/hedera-api/smart-contracts/contractupdate) operations include a `memo` field with a note.
> max 100 bytes
When the node receives a transaction from a client via the gRPC interface, this constraint appears to be enforced as a part of validations in the `TransactionHandler.validateTransactionPreConsensus` method with this concrete check at https://github.com/swirlds/services-hedera/blob/4dca8c55e200e70774f2906f44a1bbbd69944ef3/hedera-node/src/main/java/com/opencrowd/handler/TransactionHandler.java#L431
No corresponding checks were found for transactions when they come from other nodes and receive consensus (with service logic invoked as a part of `AwareProcessLogic.incorporateConsensusTxn`).
## Problem
This issue is not a problem as long all nodes can be trusted, but if we understand correctly, that will not be the case anymore in the future. | 9fdf6604a65f0bb02fb271c29c1a9718c65d48d1 | b7fd6de13640cf6a77a7b12b181203569da915e8 | https://github.com/hashgraph/hedera-services/compare/9fdf6604a65f0bb02fb271c29c1a9718c65d48d1...b7fd6de13640cf6a77a7b12b181203569da915e8 | diff --git a/hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java b/hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java
index a388201209..082a796e7a 100644
--- a/hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java
+++ b/hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java
@@ -70,6 +70,7 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNAT
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE_COUNT_MISMATCHING_KEY;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TRANSACTION_DURATION;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.KEY_PREFIX_MISMATCH;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MEMO_TOO_LONG;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MODIFYING_IMMUTABLE_CONTRACT;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS;
@@ -77,6 +78,9 @@ import static java.time.ZoneOffset.UTC;
import static java.time.temporal.ChronoUnit.SECONDS;
public class AwareProcessLogic implements ProcessLogic {
+
+ private static final int MEMO_SIZE_LIMIT = 100;
+
static Logger log = LogManager.getLogger(AwareProcessLogic.class);
private static final EnumSet<ResponseCodeEnum> SIG_RATIONALIZATION_ERRORS = EnumSet.of(
@@ -365,6 +369,8 @@ public class AwareProcessLogic implements ProcessLogic {
try {
if (!ctx.hfs().exists(fid)) {
record = ctx.contracts().getFailureTransactionRecord(txn, consensusTime, INVALID_FILE_ID);
+ } else if(isMemoTooLong(txn.getContractCreateInstance().getMemo())) {
+ record = ctx.contracts().getFailureTransactionRecord(txn, consensusTime, MEMO_TOO_LONG);
} else {
byte[] contractByteCode = ctx.hfs().cat(fid);
if (contractByteCode.length > 0) {
@@ -385,7 +391,11 @@ public class AwareProcessLogic implements ProcessLogic {
}
} else if (txn.hasContractUpdateInstance()) {
try {
- record = ctx.contracts().updateContract(txn, consensusTime);
+ if (isMemoTooLong(txn.getContractUpdateInstance().getMemo())) {
+ record = ctx.contracts().getFailureTransactionRecord(txn, consensusTime, MEMO_TOO_LONG);
+ } else {
+ record = ctx.contracts().updateContract(txn, consensusTime);
+ }
} catch (Exception e) {
log.error("Error during update contract", e);
}
@@ -417,4 +427,8 @@ public class AwareProcessLogic implements ProcessLogic {
}
return record;
}
+
+ private boolean isMemoTooLong(String memo) {
+ return memo.length() > MEMO_SIZE_LIMIT;
+ }
}
diff --git a/hedera-node/src/test/java/com/hedera/services/legacy/services/state/AwareProcessLogicTest.java b/hedera-node/src/test/java/com/hedera/services/legacy/services/state/AwareProcessLogicTest.java
index d37a61b18c..d2dec42c51 100644
--- a/hedera-node/src/test/java/com/hedera/services/legacy/services/state/AwareProcessLogicTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/legacy/services/state/AwareProcessLogicTest.java
@@ -21,9 +21,36 @@ package com.hedera.services.legacy.services.state;
*/
import com.hedera.services.context.ServicesContext;
+import com.hedera.services.context.TransactionContext;
+import com.hedera.services.context.domain.trackers.IssEventInfo;
+import com.hedera.services.fees.FeeCalculator;
+import com.hedera.services.fees.charging.TxnFeeChargingPolicy;
+import com.hedera.services.files.HederaFs;
+import com.hedera.services.ledger.HederaLedger;
+import com.hedera.services.ledger.accounts.BackingAccounts;
+import com.hedera.services.legacy.handler.SmartContractRequestHandler;
+import com.hedera.services.legacy.services.stats.HederaNodeStats;
+import com.hedera.services.records.AccountRecordsHistorian;
+import com.hedera.services.records.TxnIdRecentHistory;
+import com.hedera.services.security.ops.SystemOpAuthorization;
+import com.hedera.services.security.ops.SystemOpPolicies;
+import com.hedera.services.sigs.order.HederaSigningOrder;
+import com.hedera.services.sigs.order.SigningOrderResult;
+import com.hedera.services.state.merkle.MerkleAccount;
+import com.hedera.services.state.submerkle.SequenceNumber;
+import com.hedera.services.txns.TransitionLogicLookup;
+import com.hedera.services.txns.validation.OptionValidator;
+import com.hedera.services.utils.PlatformTxnAccessor;
import com.hedera.test.utils.IdUtils;
+import com.hederahashgraph.api.proto.java.AccountID;
+import com.hederahashgraph.api.proto.java.ContractCreateTransactionBody;
+import com.hederahashgraph.api.proto.java.ContractUpdateTransactionBody;
+import com.hederahashgraph.api.proto.java.Duration;
+import com.hederahashgraph.api.proto.java.FileID;
+import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
import com.hederahashgraph.api.proto.java.TransactionBody;
import com.hederahashgraph.api.proto.java.TransactionID;
+import com.hederahashgraph.api.proto.java.TransactionRecord;
import com.swirlds.common.Address;
import com.swirlds.common.AddressBook;
import com.swirlds.common.Transaction;
@@ -31,12 +58,19 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.time.Instant;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Optional;
+import static com.hedera.services.context.domain.trackers.IssEventStatus.NO_KNOWN_ISS;
+import static com.hedera.services.txns.diligence.DuplicateClassification.BELIEVED_UNIQUE;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MEMO_TOO_LONG;
import static org.mockito.BDDMockito.*;
@RunWith(JUnitPlatform.class)
@@ -45,12 +79,39 @@ class AwareProcessLogicTest {
Transaction platformTxn;
AddressBook book;
ServicesContext ctx;
+ TransactionContext txnCtx;
+ TransactionBody txnBody;
+ SmartContractRequestHandler contracts;
+ HederaFs hfs;
AwareProcessLogic subject;
@BeforeEach
public void setup() {
+ final Transaction txn = mock(Transaction.class);
+ final PlatformTxnAccessor txnAccessor = mock(PlatformTxnAccessor.class);
+ final HederaLedger ledger = mock(HederaLedger.class);
+ final AccountRecordsHistorian historian = mock(AccountRecordsHistorian.class);
+ final HederaSigningOrder keyOrder = mock(HederaSigningOrder.class);
+ final SigningOrderResult orderResult = mock(SigningOrderResult.class);
+ final HederaNodeStats stats = mock(HederaNodeStats.class);
+ final FeeCalculator fees = mock(FeeCalculator.class);
+ final TxnIdRecentHistory recentHistory = mock(TxnIdRecentHistory.class);
+ final Map<TransactionID, TxnIdRecentHistory> histories = mock(Map.class);
+ final BackingAccounts<AccountID, MerkleAccount> backingAccounts = mock(BackingAccounts.class);
+ final AccountID accountID = mock(AccountID.class);
+ final OptionValidator validator = mock(OptionValidator.class);
+ final TxnFeeChargingPolicy policy = mock(TxnFeeChargingPolicy.class);
+ final SystemOpPolicies policies = mock(SystemOpPolicies.class);
+ final TransitionLogicLookup lookup = mock(TransitionLogicLookup.class);
+ hfs = mock(HederaFs.class);
+
+ given(histories.get(any())).willReturn(recentHistory);
+
+ txnCtx = mock(TransactionContext.class);
ctx = mock(ServicesContext.class);
+ txnBody = mock(TransactionBody.class);
+ contracts = mock(SmartContractRequestHandler.class);
mockLog = mock(Logger.class);
platformTxn = new Transaction(com.hederahashgraph.api.proto.java.Transaction.newBuilder()
.setBodyBytes(TransactionBody.newBuilder()
@@ -69,6 +130,51 @@ class AwareProcessLogicTest {
given(book.getAddress(1)).willReturn(stakedAddress);
given(book.getAddress(666L)).willReturn(zeroStakeAddress);
given(ctx.addressBook()).willReturn(book);
+ given(ctx.ledger()).willReturn(ledger);
+ given(ctx.txnCtx()).willReturn(txnCtx);
+ given(ctx.recordsHistorian()).willReturn(historian);
+ given(ctx.backedKeyOrder()).willReturn(keyOrder);
+ given(ctx.stats()).willReturn(stats);
+ given(ctx.fees()).willReturn(fees);
+ given(ctx.txnHistories()).willReturn(histories);
+ given(ctx.backingAccounts()).willReturn(backingAccounts);
+ given(ctx.validator()).willReturn(validator);
+ given(ctx.txnChargingPolicy()).willReturn(policy);
+ given(ctx.systemOpPolicies()).willReturn(policies);
+ given(ctx.transitionLogic()).willReturn(lookup);
+ given(ctx.hfs()).willReturn(hfs);
+ given(ctx.contracts()).willReturn(contracts);
+
+ given(txnCtx.accessor()).willReturn(txnAccessor);
+ given(txnCtx.submittingNodeAccount()).willReturn(accountID);
+ given(txnCtx.isPayerSigKnownActive()).willReturn(true);
+ given(txnAccessor.getPlatformTxn()).willReturn(txn);
+
+ given(txn.getSignatures()).willReturn(Collections.emptyList());
+ given(keyOrder.keysForPayer(any(), any())).willReturn(orderResult);
+ given(keyOrder.keysForOtherParties(any(), any())).willReturn(orderResult);
+
+ final com.hederahashgraph.api.proto.java.Transaction signedTxn = mock(com.hederahashgraph.api.proto.java.Transaction.class);
+ final TransactionID txnId = mock(TransactionID.class);
+
+ given(txnAccessor.getSignedTxn()).willReturn(signedTxn);
+ given(txnAccessor.getTxn()).willReturn(txnBody);
+ given(signedTxn.hasSigs()).willReturn(false);
+ given(txnBody.getTransactionID()).willReturn(txnId);
+ given(txnBody.getTransactionValidDuration()).willReturn(Duration.getDefaultInstance());
+
+ given(recentHistory.currentDuplicityFor(anyLong())).willReturn(BELIEVED_UNIQUE);
+ given(backingAccounts.contains(any())).willReturn(true);
+
+ given(validator.isValidTxnDuration(anyLong())).willReturn(true);
+ given(validator.chronologyStatus(any(), any())).willReturn(ResponseCodeEnum.OK);
+ given(validator.isValidAutoRenewPeriod(any())).willReturn(true);
+
+ given(txnBody.getNodeAccountID()).willReturn(accountID);
+ given(policy.apply(any(), any())).willReturn(ResponseCodeEnum.OK);
+ given(policies.check(any())).willReturn(SystemOpAuthorization.AUTHORIZED);
+ given(lookup.lookupFor(any(), any())).willReturn(Optional.empty());
+ given(hfs.exists(any())).willReturn(true);
subject = new AwareProcessLogic(ctx);
}
@@ -106,4 +212,164 @@ class AwareProcessLogicTest {
// then:
verify(mockLog).error(argThat((String s) -> s.startsWith("Catastrophic invariant failure!")));
}
-}
\\ No newline at end of file
+
+
+ @Test
+ @DisplayName("incorporateConsensusTxn assigns a failure due to memo size for ContractCreateInstance")
+ public void shortCircuitsOnMemoSizeForContractCreate() {
+ // setup:
+ final Instant now = Instant.now();
+ final Instant then = now.minusMillis(10L);
+ final IssEventInfo eventInfo = mock(IssEventInfo.class);
+ final TransactionRecord record = mock(TransactionRecord.class);
+ given(eventInfo.status()).willReturn(NO_KNOWN_ISS);
+
+ given(ctx.consensusTimeOfLastHandledTxn()).willReturn(then);
+ given(ctx.addressBook().getAddress(666).getStake()).willReturn(1L);
+ given(ctx.issEventInfo()).willReturn(eventInfo);
+ given(txnCtx.consensusTime()).willReturn(now);
+ given(txnBody.hasContractCreateInstance()).willReturn(true);
+ given(txnBody.getContractCreateInstance()).willReturn(ContractCreateTransactionBody.newBuilder()
+ .setMemo("This is a very long memo because it contains more than 100 characters, " +
+ "which is greater than it is expected")
+ .setFileID(FileID.newBuilder().build())
+ .setAutoRenewPeriod(Duration.newBuilder().setSeconds(10).build())
+ .build());
+
+ given(contracts.getFailureTransactionRecord(any(), any(), any())).willReturn(record);
+
+ // when:
+ subject.incorporateConsensusTxn(platformTxn, now, 666);
+
+ // then:
+ verify(contracts).getFailureTransactionRecord(txnBody, now, MEMO_TOO_LONG);
+ }
+
+ @Test
+ @DisplayName("creates a contract with small memo size")
+ public void contractCreateInstanceIsCreated() {
+ // setup:
+ final byte[] contractByteCode = new byte[] { 100 };
+ final SequenceNumber sequenceNumber = new SequenceNumber();
+ final Instant now = Instant.now();
+ final Instant then = now.minusMillis(10L);
+ final IssEventInfo eventInfo = mock(IssEventInfo.class);
+ final TransactionRecord record = mock(TransactionRecord.class);
+ given(eventInfo.status()).willReturn(NO_KNOWN_ISS);
+
+ given(ctx.consensusTimeOfLastHandledTxn()).willReturn(then);
+ given(ctx.addressBook().getAddress(666).getStake()).willReturn(1L);
+ given(ctx.issEventInfo()).willReturn(eventInfo);
+ given(ctx.seqNo()).willReturn(sequenceNumber);
+
+ given(txnCtx.consensusTime()).willReturn(now);
+ given(txnBody.hasContractCreateInstance()).willReturn(true);
+ given(txnBody.getContractCreateInstance()).willReturn(ContractCreateTransactionBody.newBuilder()
+ .setMemo("This is a very small memo")
+ .setFileID(FileID.newBuilder().build())
+ .setAutoRenewPeriod(Duration.newBuilder().setSeconds(10).build())
+ .build());
+ given(hfs.cat(any())).willReturn(contractByteCode);
+
+
+ // when:
+ subject.incorporateConsensusTxn(platformTxn, now, 666);
+
+ // then:
+ verify(contracts).createContract(txnBody, now, contractByteCode, sequenceNumber);
+ }
+
+ @Test
+ @DisplayName("creates a contract with no memo")
+ public void contractCreateInstanceIsCreatedNoMemo() {
+ // setup:
+ final byte[] contractByteCode = new byte[] { 100 };
+ final SequenceNumber sequenceNumber = new SequenceNumber();
+ final Instant now = Instant.now();
+ final Instant then = now.minusMillis(10L);
+ final IssEventInfo eventInfo = mock(IssEventInfo.class);
+ final TransactionRecord record = mock(TransactionRecord.class);
+ given(eventInfo.status()).willReturn(NO_KNOWN_ISS);
+
+ given(ctx.consensusTimeOfLastHandledTxn()).willReturn(then);
+ given(ctx.addressBook().getAddress(666).getStake()).willReturn(1L);
+ given(ctx.issEventInfo()).willReturn(eventInfo);
+ given(ctx.seqNo()).willReturn(sequenceNumber);
+
+ given(txnCtx.consensusTime()).willReturn(now);
+ given(txnBody.hasContractCreateInstance()).willReturn(true);
+ given(txnBody.getContractCreateInstance()).willReturn(ContractCreateTransactionBody.newBuilder()
+ .setFileID(FileID.newBuilder().build())
+ .setAutoRenewPeriod(Duration.newBuilder().setSeconds(10).build())
+ .build());
+ given(hfs.cat(any())).willReturn(contractByteCode);
+
+
+ // when:
+ subject.incorporateConsensusTxn(platformTxn, now, 666);
+
+ // then:
+ verify(contracts).createContract(txnBody, now, contractByteCode, sequenceNumber);
+ }
+
+ @Test
+ @DisplayName("incorporateConsensusTxn assigns a failure due to memo size for ContractUpdateInstance")
+ public void shortCircuitsOnMemoSizeForContractUpdate() {
+ // setup:
+ final Instant now = Instant.now();
+ final Instant then = now.minusMillis(10L);
+ final IssEventInfo eventInfo = mock(IssEventInfo.class);
+ final TransactionRecord record = mock(TransactionRecord.class);
+ given(eventInfo.status()).willReturn(NO_KNOWN_ISS);
+
+ given(ctx.consensusTimeOfLastHandledTxn()).willReturn(then);
+ given(ctx.addressBook().getAddress(666).getStake()).willReturn(1L);
+ given(ctx.issEventInfo()).willReturn(eventInfo);
+ given(txnCtx.consensusTime()).willReturn(now);
+ given(txnBody.hasContractUpdateInstance()).willReturn(true);
+ given(txnBody.getContractUpdateInstance()).willReturn(ContractUpdateTransactionBody.newBuilder()
+ .setMemo("This is a very long memo because it contains more than 100 characters, " +
+ "which is greater than it is expected")
+ .setFileID(FileID.newBuilder().build())
+ .setAutoRenewPeriod(Duration.newBuilder().setSeconds(10).build())
+ .build());
+
+ given(contracts.getFailureTransactionRecord(any(), any(), any())).willReturn(record);
+
+ // when:
+ subject.incorporateConsensusTxn(platformTxn, now, 666);
+
+ // then:
+ verify(contracts).getFailureTransactionRecord(txnBody, now, MEMO_TOO_LONG);
+ }
+
+ @Test
+ @DisplayName("ContractUpdateInstance is updated in contracts")
+ public void contractUpdateInstanceIsUpdate() {
+ // setup:
+ final Instant now = Instant.now();
+ final Instant then = now.minusMillis(10L);
+ final IssEventInfo eventInfo = mock(IssEventInfo.class);
+ final TransactionRecord record = mock(TransactionRecord.class);
+ given(eventInfo.status()).willReturn(NO_KNOWN_ISS);
+
+ given(ctx.consensusTimeOfLastHandledTxn()).willReturn(then);
+ given(ctx.addressBook().getAddress(666).getStake()).willReturn(1L);
+ given(ctx.issEventInfo()).willReturn(eventInfo);
+ given(txnCtx.consensusTime()).willReturn(now);
+ given(txnBody.hasContractUpdateInstance()).willReturn(true);
+ given(txnBody.getContractUpdateInstance()).willReturn(ContractUpdateTransactionBody.newBuilder()
+ .setMemo("This is a very small memo")
+ .setFileID(FileID.newBuilder().build())
+ .setAutoRenewPeriod(Duration.newBuilder().setSeconds(10).build())
+ .build());
+
+
+ // when:
+ subject.incorporateConsensusTxn(platformTxn, now, 666);
+
+ // then:
+ verify(contracts).updateContract(txnBody, now);
+ }
+
+} | ['hedera-node/src/test/java/com/hedera/services/legacy/services/state/AwareProcessLogicTest.java', 'hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 6,869,846 | 1,598,520 | 183,146 | 952 | 719 | 175 | 16 | 1 | 1,128 | 112 | 277 | 14 | 4 | 0 | 1970-01-01T00:26:40 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,483 | hashgraph/hedera-services/664/654 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/654 | https://github.com/hashgraph/hedera-services/pull/664 | https://github.com/hashgraph/hedera-services/pull/664 | 1 | closes | QueryFailuresSpec suite's getsExpectedRejection should return FILE_DELETED as pre-check status | **Summary of the defect**
When querying the file contents of a deleted file. Instead it returns pre-check status as OK.
**How to reproduce (if possible)**
Please refer to QueryFailuresSpec.java source code.
**Service logs (if applicable)**
```
...
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| 5e10ea5a36b7ef87144e0f64a6b46ce7b093ab49 | 98894770039f32fb4d0a4fcdda1cc1946cf028be | https://github.com/hashgraph/hedera-services/compare/5e10ea5a36b7ef87144e0f64a6b46ce7b093ab49...98894770039f32fb4d0a4fcdda1cc1946cf028be | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/QueryFailuresSpec.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/QueryFailuresSpec.java
index f36f8ed184..e6b2a37499 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/QueryFailuresSpec.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/QueryFailuresSpec.java
@@ -34,6 +34,7 @@ import static com.hedera.services.bdd.spec.HapiApiSpec.defaultHapiSpec;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileInfo;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.*;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sleepFor;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.FILE_DELETED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FILE_ID;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MAX_FILE_SIZE_EXCEEDED;
@@ -73,6 +74,7 @@ public class QueryFailuresSpec extends HapiApiSuite {
getFileInfo("tbd")
.nodePayment(1_234L)
.hasAnswerOnlyPrecheck(OK)
+ .hasDeleted(true)
.logged()
);
} | ['test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/QueryFailuresSpec.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 7,017,786 | 1,637,977 | 188,321 | 990 | 98 | 24 | 2 | 1 | 521 | 76 | 146 | 19 | 0 | 1 | 1970-01-01T00:26:42 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,484 | hashgraph/hedera-services/660/659 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/659 | https://github.com/hashgraph/hedera-services/pull/660 | https://github.com/hashgraph/hedera-services/pull/660 | 1 | closes | Unavailable channel when using SuiteRunner to run multiple suites that include load/perf test | https://github.com/hashgraph/hedera-services/blob/c3aa15f4edc8f406ada2c98dadc9d7af2ed252e0/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java#L231-L241
https://github.com/hashgraph/hedera-services/blob/c3aa15f4edc8f406ada2c98dadc9d7af2ed252e0/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java#L284-L297
With the current logic, the channels between clients and servers are closed when a spec failed or there are pending ops (in case of load/perf test). This causes the following exception when using SuiteRunner to run multiple suites that include load/perf test.
```
io.grpc.StatusRuntimeException: UNAVAILABLE: Channel shutdown invoked
at io.grpc.stub.ClientCalls.toStatusRuntimeException(ClientCalls.java:235) ~[SuiteRunner.jar:?]
at io.grpc.stub.ClientCalls.getUnchecked(ClientCalls.java:216) ~[SuiteRunner.jar:?]
at io.grpc.stub.ClientCalls.blockingUnaryCall(ClientCalls.java:141) ~[SuiteRunner.jar:?]
at com.hederahashgraph.service.proto.java.FileServiceGrpc$FileServiceBlockingStub.getFileContent(FileServiceGrpc.java:638) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.fees.FeesAndRatesProvider.downloadWith(FeesAndRatesProvider.java:157) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.fees.FeesAndRatesProvider.lookupDownloadFee(FeesAndRatesProvider.java:150) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.fees.FeesAndRatesProvider.downloadRateSet(FeesAndRatesProvider.java:120) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.fees.FeesAndRatesProvider.init(FeesAndRatesProvider.java:79) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.HapiApiSpec.init(HapiApiSpec.java:187) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.spec.HapiApiSpec.run(HapiApiSpec.java:168) ~[SuiteRunner.jar:?]
at java.util.AbstractList$RandomAccessSpliterator.forEachRemaining(AbstractList.java:720) ~[?:?]
at java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658) ~[?:?]
at com.hedera.services.bdd.suites.HapiApiSuite.runSync(HapiApiSuite.java:246) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.HapiApiSuite.runSuite(HapiApiSuite.java:138) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.HapiApiSuite.runSuiteSync(HapiApiSuite.java:131) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.SuiteRunner.lambda$runSuitesSync$14(SuiteRunner.java:407) ~[SuiteRunner.jar:?]
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:176) ~[?:?]
at java.util.Spliterators$ArraySpliterator.forEachRemaining(Spliterators.java:948) ~[?:?]
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484) ~[?:?]
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474) ~[?:?]
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:913) ~[?:?]
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:578) ~[?:?]
at com.hedera.services.bdd.suites.SuiteRunner.runSuitesSync(SuiteRunner.java:408) ~[SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.SuiteRunner.lambda$runTargetCategories$10(SuiteRunner.java:381) ~[SuiteRunner.jar:?]
at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195) [?:?]
at java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1654) [?:?]
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484) [?:?]
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474) [?:?]
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:913) [?:?]
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) [?:?]
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:578) [?:?]
at com.hedera.services.bdd.suites.SuiteRunner.runTargetCategories(SuiteRunner.java:381) [SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.SuiteRunner.runCategories(SuiteRunner.java:350) [SuiteRunner.jar:?]
at com.hedera.services.bdd.suites.SuiteRunner.main(SuiteRunner.java:290) [SuiteRunner.jar:?]
```
**Solution:**
- Add correct tearDown function for `HapiApiSpec`, `HapiApiSuite` and `HapiApiClients`
- Move the logic of tearing down clients to in suite instead of in spec. | c3aa15f4edc8f406ada2c98dadc9d7af2ed252e0 | 38daa047f718e4bc57164c57391a942791177b56 | https://github.com/hashgraph/hedera-services/compare/c3aa15f4edc8f406ada2c98dadc9d7af2ed252e0...38daa047f718e4bc57164c57391a942791177b56 | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java
index 103667656c..7e2647612f 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java
@@ -202,6 +202,12 @@ public class HapiApiSpec implements Runnable {
return true;
}
+ private void tearDown() {
+ if (finalizingExecutor != null) {
+ finalizingExecutor.shutdown();
+ }
+ }
+
private void exec(List<HapiSpecOperation> ops) {
if (status == ERROR) {
log.warn("'" + name + "' failed to initialize, being skipped!");
@@ -233,12 +239,8 @@ public class HapiApiSpec implements Runnable {
if (finishingError.get().isPresent()) {
status = FAILED;
}
- } else {
- if (finalizingExecutor != null) {
- finalizingExecutor.shutdown();
- this.clients().closeChannels();
- }
}
+ tearDown();
log.info(logPrefix() + "final status: " + status + "!");
if(saveContextFlag) {
@@ -283,7 +285,6 @@ public class HapiApiSpec implements Runnable {
private void finishFinalizingOps() {
if (pendingOps.isEmpty()) {
- finalizingExecutor.shutdown();
return;
}
log.info(logPrefix() + "executed " + numLedgerOpsExecuted.get() + " ledger ops.");
@@ -292,8 +293,6 @@ public class HapiApiSpec implements Runnable {
startFinalizingOps();
}
finalizingFuture.join();
- finalizingExecutor.shutdown();
- this.clients().closeChannels();
}
public void offerFinisher(HapiSpecOpFinisher finisher) {
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiApiClients.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiApiClients.java
index d53b5e76b1..db96944cd4 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiApiClients.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiApiClients.java
@@ -40,7 +40,6 @@ import com.hederahashgraph.service.proto.java.NetworkServiceGrpc.NetworkServiceB
import com.hederahashgraph.service.proto.java.TokenServiceGrpc;
import com.hederahashgraph.service.proto.java.TokenServiceGrpc.TokenServiceBlockingStub;
import io.grpc.ManagedChannel;
-import io.grpc.ManagedChannelBuilder;
import io.grpc.netty.GrpcSslContexts;
import io.grpc.netty.NegotiationType;
import io.grpc.netty.NettyChannelBuilder;
@@ -49,12 +48,10 @@ import io.netty.handler.ssl.SupportedCipherSuiteFilter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Optional;
import static java.util.stream.Collectors.toMap;
@@ -73,7 +70,7 @@ public class HapiApiClients {
private final List<NodeConnectInfo> nodes;
private final Map<AccountID, String> stubIds;
private final Map<AccountID, String> tlsStubIds;
- private List<ManagedChannel> channels;
+ private static Map<String, ManagedChannel> channels = new HashMap<>();
private final ManagedChannel createNettyChannel(NodeConnectInfo node, boolean useTls) {
try {
@@ -100,7 +97,6 @@ public class HapiApiClients {
.usePlaintext()
.build();
}
- channels.add(channel);
return channel;
} catch (Exception e) {
log.error("Error creating Netty channel", e);
@@ -108,8 +104,21 @@ public class HapiApiClients {
return null;
}
+ private void addStubs(NodeConnectInfo node, String uri, boolean useTls) {
+ if (!channels.containsKey(uri)) {
+ ManagedChannel channel = createNettyChannel(node, useTls);
+ channels.put(uri, channel);
+ scSvcStubs.put(uri, SmartContractServiceGrpc.newBlockingStub(channel));
+ consSvcStubs.put(uri, ConsensusServiceGrpc.newBlockingStub(channel));
+ fileSvcStubs.put(uri, FileServiceGrpc.newBlockingStub(channel));
+ tokenSvcStubs.put(uri, TokenServiceGrpc.newBlockingStub(channel));
+ cryptoSvcStubs.put(uri, CryptoServiceGrpc.newBlockingStub(channel));
+ freezeSvcStubs.put(uri, FreezeServiceGrpc.newBlockingStub(channel));
+ networkSvcStubs.put(uri, NetworkServiceGrpc.newBlockingStub(channel));
+ }
+ }
+
private HapiApiClients(List<NodeConnectInfo> nodes, AccountID defaultNode) {
- this.channels = new ArrayList<>();
this.nodes = nodes;
stubIds = nodes
.stream()
@@ -119,25 +128,8 @@ public class HapiApiClients {
.collect(toMap(NodeConnectInfo::getAccount, NodeConnectInfo::tlsUri));
int before = stubCount();
nodes.forEach(node -> {
- ManagedChannel channel = createNettyChannel(node, false);
- String stubsId = node.uri();
- scSvcStubs.computeIfAbsent(stubsId, ignore -> SmartContractServiceGrpc.newBlockingStub(channel));
- consSvcStubs.computeIfAbsent(stubsId, ignore -> ConsensusServiceGrpc.newBlockingStub(channel));
- fileSvcStubs.computeIfAbsent(stubsId, ignore -> FileServiceGrpc.newBlockingStub(channel));
- tokenSvcStubs.computeIfAbsent(stubsId, ignore -> TokenServiceGrpc.newBlockingStub(channel));
- cryptoSvcStubs.computeIfAbsent(stubsId, ignore -> CryptoServiceGrpc.newBlockingStub(channel));
- freezeSvcStubs.computeIfAbsent(stubsId, ignore -> FreezeServiceGrpc.newBlockingStub(channel));
- networkSvcStubs.computeIfAbsent(stubsId, ignore -> NetworkServiceGrpc.newBlockingStub(channel));
-
- ManagedChannel tlsChannel = createNettyChannel(node, true);
- String tlsStubsId = node.tlsUri();
- scSvcStubs.computeIfAbsent(tlsStubsId, ignore -> SmartContractServiceGrpc.newBlockingStub(tlsChannel));
- consSvcStubs.computeIfAbsent(tlsStubsId, ignore -> ConsensusServiceGrpc.newBlockingStub(tlsChannel));
- fileSvcStubs.computeIfAbsent(tlsStubsId, ignore -> FileServiceGrpc.newBlockingStub(tlsChannel));
- tokenSvcStubs.computeIfAbsent(tlsStubsId, ignore -> TokenServiceGrpc.newBlockingStub(tlsChannel));
- cryptoSvcStubs.computeIfAbsent(tlsStubsId, ignore -> CryptoServiceGrpc.newBlockingStub(tlsChannel));
- freezeSvcStubs.computeIfAbsent(tlsStubsId, ignore -> FreezeServiceGrpc.newBlockingStub(tlsChannel));
- networkSvcStubs.computeIfAbsent(stubsId, ignore -> NetworkServiceGrpc.newBlockingStub(tlsChannel));
+ addStubs(node, node.uri(), false);
+ addStubs(node, node.tlsUri(), true);
});
int after = stubCount();
this.defaultNode = defaultNode;
@@ -207,10 +199,26 @@ public class HapiApiClients {
/**
* Close all netty channels that are opened for clients
*/
- public void closeChannels() {
+ private static void closeChannels() {
if (channels.isEmpty()) {
return;
}
- channels.forEach(channel -> channel.shutdown());
+ channels.forEach((uri, channel) -> channel.shutdown());
+ channels.clear();
+ }
+
+ private static void clearStubs() {
+ scSvcStubs.clear();
+ consSvcStubs.clear();
+ fileSvcStubs.clear();
+ tokenSvcStubs.clear();
+ cryptoSvcStubs.clear();
+ freezeSvcStubs.clear();
+ networkSvcStubs.clear();
+ }
+
+ public static void tearDown() {
+ closeChannels();
+ clearStubs();
}
}
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/HapiApiSuite.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/HapiApiSuite.java
index 62682887d2..7306af93bd 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/HapiApiSuite.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/HapiApiSuite.java
@@ -24,6 +24,7 @@ import com.google.common.math.Stats;
import com.hedera.services.bdd.spec.HapiApiSpec;
import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.HapiSpecSetup;
+import com.hedera.services.bdd.spec.infrastructure.HapiApiClients;
import com.hedera.services.bdd.spec.queries.HapiQueryOp;
import com.hedera.services.bdd.spec.stats.HapiStats;
import com.hedera.services.bdd.spec.stats.OpObs;
@@ -42,7 +43,6 @@ import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.function.Function;
@@ -138,6 +138,7 @@ public abstract class HapiApiSuite {
runner.accept(specs);
finalSpecs = specs;
summarizeResults(getResultsLogger());
+ HapiApiClients.tearDown();
return finalOutcomeFor(finalSpecs);
}
| ['test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/HapiApiSuite.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiApiClients.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 6,997,760 | 1,632,751 | 187,675 | 988 | 3,717 | 900 | 80 | 3 | 4,316 | 182 | 1,154 | 45 | 2 | 1 | 1970-01-01T00:26:42 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,485 | hashgraph/hedera-services/652/578 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/578 | https://github.com/hashgraph/hedera-services/pull/652 | https://github.com/hashgraph/hedera-services/pull/652 | 1 | closes | Find out why UmbrellaRedux only send out ~50% of required operations for HCS requests | **Summary of the defect**
While doing perf test, UmbrellaRedux seems to only send out 50% of configured requests rate.
**How to reproduce (if possible)**
Check out the nightly JRS regression HCS perf test config.
**Service logs (if applicable)**
```
...
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
None
| 4b43e5d5e676a0cdad122688e6c0804360b54af9 | 4c8845a19f8f4e95c0d4f885103d2926297908ab | https://github.com/hashgraph/hedera-services/compare/4b43e5d5e676a0cdad122688e6c0804360b54af9...4c8845a19f8f4e95c0d4f885103d2926297908ab | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/providers/ops/consensus/RandomMessageSubmit.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/providers/ops/consensus/RandomMessageSubmit.java
index 6580d7ce18..7f9b0bd8f5 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/providers/ops/consensus/RandomMessageSubmit.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/providers/ops/consensus/RandomMessageSubmit.java
@@ -98,10 +98,6 @@ public class RandomMessageSubmit implements OpProvider {
.hasKnownStatusFrom(permissibleOutcomes)
.hasPrecheckFrom(STANDARD_PERMISSIBLE_PRECHECKS);
- if (r.nextBoolean()) {
- op = op.usePresetTimestamp();
- }
-
return Optional.of(op);
}
} | ['test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/providers/ops/consensus/RandomMessageSubmit.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,935,113 | 1,617,567 | 186,200 | 982 | 115 | 19 | 4 | 1 | 451 | 66 | 135 | 20 | 0 | 1 | 1970-01-01T00:26:42 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,489 | hashgraph/hedera-services/552/551 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/551 | https://github.com/hashgraph/hedera-services/pull/552 | https://github.com/hashgraph/hedera-services/pull/552 | 1 | closes | Fees - FieldSourcedFeeScreening can throw null exception in some scenarios | Created by [261](https://github.com/swirlds/hedera-fpcomplete-audit/pull/261)
## Situation
We have a method named
[canParticipantAfford](https://github.com/hashgraph/hedera-services/blob/e4f825ce23d5e4429192c8bf218ea220064d548e/hedera-node/src/main/java/com/hedera/services/fees/charging/FieldSourcedFeeScreening.java#L84
"canParticipantAfford") in the class `FieldSourcedFeeScreening`
defined like this:
``` java
@Override
public boolean canParticipantAfford(AccountID participant, EnumSet<TxnFeeType> fees) {
long exemptAmount = 0;
if (fees.contains(THRESHOLD_RECORD)) {
exemptAmount += exemptions.isExemptFromRecordFees(participant) ? feeAmounts.get(THRESHOLD_RECORD) : 0;
} // what if feeAmounts.get doesn't have threshold_record
long netAmount = totalAmountOf(fees) - exemptAmount;
return check.canAfford(participant, netAmount);
}
```
The problem here is that the above code can throw a null exception if
the `feeAmounts` doesn't have the key `THRESHOLD_RECORD` in it. We
have a small test to reproduce the above behavior. You can add the
following method in the test file
[FieldSourcedFeeScreeningTest.java](https://github.com/hashgraph/hedera-services/blob/e4f825ce23d5e4429192c8bf218ea220064d548e/hedera-node/src/test/java/com/hedera/services/fees/charging/FieldSourcedFeeScreeningTest.java#L103
"FieldSourcedFeeScreeningTest.java"):
``` java
@Test
public void feeTest() {
// setup:
EnumSet<TxnFeeType> thresholdRecordFee = EnumSet.of(THRESHOLD_RECORD);
subject.setFor(NETWORK, network);
subject.setFor(SERVICE, service);
subject.setFor(NODE, node);
subject.setFor(CACHE_RECORD, cacheRecord);
// when:
boolean viability = subject.canParticipantAfford(master, thresholdRecordFee);
// then:
assertFalse(viability);
}
```
And when the test is executed, it fails with null exception.
## Problem
When the `fees` contains `THRESHOLD_RECORD` and the `feeAmounts`
doesn't contain `THRESHOLD_RECORD` as its key, in some conditions it
can lead to a null exception.
Also, we could see the same pattern at various other locations where it
might be an issue:
* In [itemizedFess method](https://github.com/hashgraph/hedera-services/blob/e4f825ce23d5e4429192c8bf218ea220064d548e/hedera-node/src/main/java/com/hedera/services/fees/charging/ItemizableFeeCharging.java#L148 "itemizedFess method")
* In [totalAmountOf method](https://github.com/hashgraph/hedera-services/blob/e4f825ce23d5e4429192c8bf218ea220064d548e/hedera-node/src/main/java/com/hedera/services/fees/charging/FieldSourcedFeeScreening.java#L93 "totalAmountOf method")
## Suggestions
There are multiple suggestions based on the situation:
* Document the pre-condition of the call explicitly so that the caller
is aware of it.
* If the pre-condition doesn't exist, check for null explicitly and
throw a new custom exception or modify the business logic
accordingly.
| 97f6446b96d04ffc955160685cd018c6ba2af64f | b93819f7631b6c2d46ba2ee02d08250bb648d29a | https://github.com/hashgraph/hedera-services/compare/97f6446b96d04ffc955160685cd018c6ba2af64f...b93819f7631b6c2d46ba2ee02d08250bb648d29a | diff --git a/hedera-node/src/main/java/com/hedera/services/fees/charging/FieldSourcedFeeScreening.java b/hedera-node/src/main/java/com/hedera/services/fees/charging/FieldSourcedFeeScreening.java
index f68c753f11..8fe576492a 100644
--- a/hedera-node/src/main/java/com/hedera/services/fees/charging/FieldSourcedFeeScreening.java
+++ b/hedera-node/src/main/java/com/hedera/services/fees/charging/FieldSourcedFeeScreening.java
@@ -83,14 +83,18 @@ public class FieldSourcedFeeScreening implements TxnScopedFeeScreening {
@Override
public boolean canParticipantAfford(AccountID participant, EnumSet<TxnFeeType> fees) {
long exemptAmount = 0;
- if (fees.contains(THRESHOLD_RECORD)) {
+ if (fees.contains(THRESHOLD_RECORD) && feeAmounts.containsKey(THRESHOLD_RECORD)) {
exemptAmount += exemptions.isExemptFromRecordFees(participant) ? feeAmounts.get(THRESHOLD_RECORD) : 0;
}
- long netAmount = totalAmountOf(fees) - exemptAmount;
+
+ final long netAmount = totalAmountOf(fees) - exemptAmount;
return check.canAfford(participant, netAmount);
}
protected long totalAmountOf(EnumSet<TxnFeeType> fees) {
- return fees.stream().mapToLong(feeAmounts::get).sum();
+ return fees.stream()
+ .filter(fee -> feeAmounts.containsKey(fee))
+ .mapToLong(feeAmounts::get)
+ .sum();
}
}
diff --git a/hedera-node/src/test/java/com/hedera/services/fees/charging/FieldSourcedFeeScreeningTest.java b/hedera-node/src/test/java/com/hedera/services/fees/charging/FieldSourcedFeeScreeningTest.java
index 269595a597..f965d64b7e 100644
--- a/hedera-node/src/test/java/com/hedera/services/fees/charging/FieldSourcedFeeScreeningTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/fees/charging/FieldSourcedFeeScreeningTest.java
@@ -41,8 +41,14 @@ import static com.hedera.services.fees.TxnFeeType.*;
@RunWith(JUnitPlatform.class)
class FieldSourcedFeeScreeningTest {
- long willingness = 1_000L;
- long network = 500L, service = 200L, node = 100L, stateRecord = 150L, cacheRecord = 50L;
+
+ final long willingness = 1_000L;
+ final long network = 500L;
+ final long service = 200L;
+ final long node = 100L;
+ final long stateRecord = 150L;
+ final long cacheRecord = 50L;
+
AccountID payer = IdUtils.asAccount("0.0.1001");
AccountID master = IdUtils.asAccount("0.0.50");
AccountID participant = IdUtils.asAccount("0.0.2002");
@@ -183,6 +189,39 @@ class FieldSourcedFeeScreeningTest {
verify(check).canAfford(payer, willingness);
}
+ @Test
+ public void canParticipantAffordTest() {
+ // setup:
+ EnumSet<TxnFeeType> thresholdRecordFee = EnumSet.of(THRESHOLD_RECORD);
+ subject.setFor(NETWORK, network);
+ subject.setFor(SERVICE, service);
+ subject.setFor(NODE, node);
+ subject.setFor(CACHE_RECORD, cacheRecord);
+ // when:
+ boolean viability = subject.canParticipantAfford(master, thresholdRecordFee);
+ // then:
+ assertFalse(viability);
+ }
+
+ @Test
+ public void participantCantAffordTest() {
+ // setup:
+ final BalanceCheck check = (payer, amount) -> amount >= stateRecord;
+
+ final EnumSet<TxnFeeType> thresholdRecordFee = EnumSet.of(THRESHOLD_RECORD);
+ subject.setFor(NETWORK, network);
+ subject.setFor(SERVICE, service);
+ subject.setFor(NODE, node);
+ subject.setFor(CACHE_RECORD, cacheRecord);
+ subject.setFor(THRESHOLD_RECORD, stateRecord);
+ subject.setBalanceCheck(check);
+
+ // when:
+ boolean viability = subject.canParticipantAfford(master, thresholdRecordFee);
+ // then:
+ assertFalse(viability);
+ }
+
private void givenKnownFeeAmounts() {
subject.setFor(NETWORK, network);
subject.setFor(SERVICE, service); | ['hedera-node/src/main/java/com/hedera/services/fees/charging/FieldSourcedFeeScreening.java', 'hedera-node/src/test/java/com/hedera/services/fees/charging/FieldSourcedFeeScreeningTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 6,842,052 | 1,591,296 | 182,207 | 939 | 424 | 119 | 10 | 1 | 2,973 | 254 | 787 | 67 | 5 | 2 | 1970-01-01T00:26:40 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,490 | hashgraph/hedera-services/540/497 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/497 | https://github.com/hashgraph/hedera-services/pull/540 | https://github.com/hashgraph/hedera-services/pull/540 | 1 | closes | JRS nightly regression AWS-Services-Daily-SoftwareUpdate-4N-1C/ServicesState-Recovery-300-7m failed in RecoverStateValidator step | **Summary of the defect**
Please refer to the error logs below.
**How to reproduce (if possible)**
https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1598863554228600
**Service logs (if applicable)**
```
---- RecoverStateValidator FAILED validation ----
<<INFO>>
Node 0 finished recover run and resume normally as expected
Node 2 finished recover run and resume normally as expected
<<ERROR>>
Node 1 did not save recover state as expected !
Node 3 did not save recover state as expected !
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| b440672250924901f9645649d442fa65d9592a20 | 4d451ffb3151e44ec09ae44b4993e097c795e86d | https://github.com/hashgraph/hedera-services/compare/b440672250924901f9645649d442fa65d9592a20...4d451ffb3151e44ec09ae44b4993e097c795e86d | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java
index 279c6226ee..7be32ad6f4 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java
@@ -82,6 +82,6 @@ public interface OpProvider {
String UNIQUE_PAYER_ACCOUNT = "uniquePayerAccount";
long UNIQUE_PAYER_ACCOUNT_INITIAL_BALANCE = 50_000_000_000L;
- long TRANSACTION_FEE = 500_000_000L;
+ long TRANSACTION_FEE = 5_000_000_000L;
}
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java
index 9b95d6b757..3eb44d8a4c 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java
@@ -247,7 +247,6 @@ public class TxnUtils {
.setSeconds(instant.getEpochSecond() + offsetSecs)
.setNanos(candidateNano).build();
- log.info("timestamp : {}", uniqueTS);
return uniqueTS;
}
| ['test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 6,840,034 | 1,590,860 | 182,153 | 939 | 119 | 38 | 3 | 2 | 774 | 108 | 214 | 26 | 1 | 1 | 1970-01-01T00:26:39 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,492 | hashgraph/hedera-services/451/452 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/452 | https://github.com/hashgraph/hedera-services/pull/451 | https://github.com/hashgraph/hedera-services/pull/451 | 1 | closes | Fix ZeroStakeTest | <!-- Thanks for submitting a bug report! Before submitting:
1. Try searching the existing issues to see if your issue has already been reported
2. If you're reporting a security vulnerability, please email security@hedera.com instead of opening an issue
-->
**Summary of the defect**
ZeroStakeTest currently have a constructor that is inconsistent with the general design of other HapiSpecs.
**How to reproduce (if possible)**
Run such-and-such `ZeroStakeTest ` with default constructor and the test fails.
| 587ce01d0036a5cd711937e32eef7cd7e539a298 | 2e891db51ed85e1cbc9f524a0e0a30f9e281d10b | https://github.com/hashgraph/hedera-services/compare/587ce01d0036a5cd711937e32eef7cd7e539a298...2e891db51ed85e1cbc9f524a0e0a30f9e281d10b | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java
index cea29b652d..b4de8ca564 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java
@@ -184,7 +184,7 @@ public class SuiteRunner {
/* Freeze and update */
put("UpdateServerFiles", aof(new UpdateServerFiles()));
/* Zero Stake behaviour */
- put("ZeroStakeTest", aof(new ZeroStakeNodeTest(System.getenv("FULLIPLIST"))));
+ put("ZeroStakeTest", aof(new ZeroStakeNodeTest()));
}};
static boolean runAsync;
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ZeroStakeNodeTest.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ZeroStakeNodeTest.java
index 07c280cd24..95f0e04914 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ZeroStakeNodeTest.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ZeroStakeNodeTest.java
@@ -26,28 +26,14 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.math.BigInteger;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-import static com.hedera.services.bdd.spec.HapiApiSpec.customHapiSpec;
+import static com.hedera.services.bdd.spec.HapiApiSpec.defaultHapiSpec;
import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.changeFromSnapshot;
import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.isLiteralResult;
import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith;
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal;
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance;
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo;
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractBytecode;
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo;
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractRecords;
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileInfo;
-import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall;
-import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate;
-import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractDelete;
-import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractUpdate;
-import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate;
-import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer;
-import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileCreate;
+import static com.hedera.services.bdd.spec.queries.QueryVerbs.*;
+import static com.hedera.services.bdd.spec.transactions.TxnVerbs.*;
import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.balanceSnapshot;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TX_FEE;
@@ -60,20 +46,9 @@ public class ZeroStakeNodeTest extends HapiApiSuite {
"\\"outputs\\":[{\\"internalType\\":\\"uint32\\",\\"name\\":\\"\\",\\"type\\":\\"uint32\\"}],\\"payable\\":false," +
"\\"stateMutability\\":\\"view\\",\\"type\\":\\"function\\"}";
- private final String nodes;
-
- public ZeroStakeNodeTest(String nodes) {
- this.nodes = nodes;
- HapiApiSpec.runInCiMode(
- nodes,
- "3",
- "OFF",
- "FIXED",
- new HashMap<String, String>());
- }
public static void main(String... args) throws Exception {
- new ZeroStakeNodeTest("<FIXME>").runSuiteSync();
+ new ZeroStakeNodeTest().runSuiteSync();
}
@Override
@@ -84,20 +59,20 @@ public class ZeroStakeNodeTest extends HapiApiSuite {
);
}
- /* Assumes that node 0.0.7 and node 0.0.8 are started with zero stake in a 6 node network. */
+ /** This test has to be run with nodes in spec-defaults set as the full list of ipAddresses and node ids of the network
+ * with zero stake nodes. Assumes that node 0.0.7 and node 0.0.8 are started with zero stake in a 6 node network.
+ **/
private HapiApiSpec zeroStakeBehavesAsExpectedJRS() {
- return customHapiSpec("zeroStakeBehavesAsExpectedJRS")
- .withProperties(Map.of(
- "nodes", nodes)
- ).given(
+ return defaultHapiSpec("zeroStakeBehavesAsExpectedJRS")
+ .given(
cryptoCreate("sponsor"),
cryptoCreate("beneficiary"),
fileCreate("bytecode").fromResource("Multipurpose.bin"),
contractCreate("multi").bytecode("bytecode"),
-// contractCreate("impossible")
-// .setNode("0.0.7")
-// .bytecode("bytecode")
-// .hasPrecheck(INVALID_NODE_ACCOUNT),
+ contractCreate("impossible")
+ .setNode("0.0.7")
+ .bytecode("bytecode")
+ .hasPrecheck(INVALID_NODE_ACCOUNT),
contractUpdate("multi")
.setNode("0.0.8")
.newMemo("Oops!") | ['test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ZeroStakeNodeTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 6,481,616 | 1,501,152 | 171,007 | 869 | 2,731 | 699 | 53 | 2 | 522 | 75 | 112 | 11 | 0 | 0 | 1970-01-01T00:26:37 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,493 | hashgraph/hedera-services/364/258 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/258 | https://github.com/hashgraph/hedera-services/pull/364 | https://github.com/hashgraph/hedera-services/pull/364 | 1 | closes | Hedera services shouldn't proceed if initial total ledger balance doesn't add up | **Summary of the defect**
When hedera services starts and the initial total ledger balance doesn't add up the expected value, the services should stop and refuse to move forward. Also this should be a severe ERROR, not just an WARN message.
**How to reproduce (if possible)**
When in CircleCi running three test jobs in parallel, this issue surfaces. Please refer to this URL for more info: https://app.circleci.com/pipelines/github/hashgraph/hedera-services/715/workflows/25649e74-eb55-45f0-a8ce-54e36f6f6bdf/jobs/9371
**Service logs (if applicable)**
Log message excerpt from CircleCi is here:
```
>>>> [CI] >> ================
>>>> [CI] >> === /repo/HapiApp2.0/18.237.197.98/output/hgcaa.log ===
>>>> [CI] >> ================
2020-06-08 22:15:32.908 INFO 87 ServicesState - Initializing context of Services node 0 with platform and address book...
2020-06-08 22:15:32.928 INFO 94 ServicesState - ...done, context is set for Services node 0!
2020-06-08 22:15:32.936 INFO 321 ServicesMain - Using context to initialize HederaNode#0...
2020-06-08 22:15:33.037 INFO 171 ServicesMain - Property sources are available.
2020-06-08 22:15:33.044 INFO 173 ServicesMain - Platform is configured.
2020-06-08 22:15:33.044 INFO 175 ServicesMain - Migrations complete.
2020-06-08 22:15:33.060 WARN 307 ServicesMain - Unexpected total balance in ledger, nodeId=0!
2020-06-08 22:15:33.078 INFO 177 ServicesMain - Ledger state ok.
2020-06-08 22:15:33.151 INFO 52 RecordCache - Constructing the RecordCache with TTL 180s
2020-06-08 22:15:33.190 INFO 224 HfsSystemFilesManager - --- Bootstrapping network properties from 'data/config/application.properties' as below ---
2020-06-08 22:15:33.193 INFO 228 HfsSystemFilesManager - NETTY_CHANNEL_TYPE = EPOLL
2020-06-08 22:15:33.194 INFO 228 HfsSystemFilesManager - accountBalanceExportDir = data/accountBalances/
2020-06-08 22:15:33.195 INFO 228 HfsSystemFilesManager - accountBalanceExportEnabled = true
2020-06-08 22:15:33.195 INFO 228 HfsSystemFilesManager - accountBalanceExportPeriodMinutes = 10
...
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 12.0.2]
- Hedera Services Version: [0.5.0]
- HAPI Version: []
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| 6b5ac50d51ac9e3271c2c5a2d27a8873ecbe396b | 7990a2ee339db2a3df32c16b2612dfb39d0802aa | https://github.com/hashgraph/hedera-services/compare/6b5ac50d51ac9e3271c2c5a2d27a8873ecbe396b...7990a2ee339db2a3df32c16b2612dfb39d0802aa | diff --git a/hedera-node/src/main/java/com/hedera/services/ServicesMain.java b/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
index f26bbe8e1c..ef2ad4c73d 100644
--- a/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
+++ b/hedera-node/src/main/java/com/hedera/services/ServicesMain.java
@@ -279,7 +279,8 @@ public class ServicesMain implements SwirldMain {
private void validateLedgerState() {
ctx.ledgerValidator().assertIdsAreValid(ctx.accounts());
if (!ctx.ledgerValidator().hasExpectedTotalBalance(ctx.accounts())) {
- log.warn("Unexpected total balance in ledger, nodeId={}!", ctx.id());
+ log.error("Unexpected total balance in ledger, nodeId={}!", ctx.id());
+ systemExits.fail(1);
}
if (ctx.nodeAccount() == null) {
throwIseOrLogError(new IllegalStateException("Unknown ledger account!"));
diff --git a/hedera-node/src/main/java/com/hedera/services/legacy/handler/TransactionHandler.java b/hedera-node/src/main/java/com/hedera/services/legacy/handler/TransactionHandler.java
index a2e6c7a299..f2e894cb14 100644
--- a/hedera-node/src/main/java/com/hedera/services/legacy/handler/TransactionHandler.java
+++ b/hedera-node/src/main/java/com/hedera/services/legacy/handler/TransactionHandler.java
@@ -615,7 +615,7 @@ public class TransactionHandler {
}
/**
- * Validates Account IDs and Total Balance in Account Map on Start Up .
+ * Validates Account IDs and Total Balance in Account Map on Start Up .
* If it finds any invalid Account ID it stops checking further and returns Invalid Account ID response code.
* If all the Account IDs are valid, it checks the total balance. If its not equal to expected balance, it
* returns Invalid Balance response code.
@@ -639,7 +639,7 @@ public class TransactionHandler {
break;
}
}
- if(response == OK) {
+ if(response == OK && !accountMap.isEmpty()) {
if(totalBalance != PropertiesLoader.getInitialGenesisCoins()) {
response = ResponseCodeEnum.TOTAL_LEDGER_BALANCE_INVALID;
}
diff --git a/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java b/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
index 2ff7abbe09..3254562124 100644
--- a/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java
@@ -67,6 +67,7 @@ import java.time.Instant;
import static com.hedera.services.context.SingletonContextsManager.CONTEXTS;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.BDDMockito.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.inOrder;
@@ -76,6 +77,7 @@ import static org.mockito.BDDMockito.never;
import static org.mockito.BDDMockito.verify;
import static org.mockito.BDDMockito.verifyNoInteractions;
import static org.mockito.BDDMockito.willThrow;
+import static org.mockito.Mockito.atLeastOnce;
@RunWith(JUnitPlatform.class)
public class ServicesMainTest {
@@ -184,9 +186,48 @@ public class ServicesMainTest {
ServicesMain.log = LogManager.getLogger(ServicesMain.class);
}
+ @Test
+ public void shouldAlwaysFailFastOnUnexpectedInitialLedgerBalance() {
+ given(properties.getBooleanProperty("hedera.exitOnNodeStartupFailure")).willReturn(anyBoolean());
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(false);
+
+ // when:
+ subject.init(null, new NodeId(false, NODE_ID));
+
+ // then:
+ verify(systemExits,atLeastOnce()).fail(1);
+ }
+
+ @Test
+ public void shouldNotFailOnExpectedInitialLedgerBalance() {
+ given(properties.getBooleanProperty("hedera.exitOnNodeStartupFailure")).willReturn(true);
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
+
+ // when:
+ subject.init(null, new NodeId(false, NODE_ID));
+
+ // then:
+ verify(systemExits,never()).fail(1);
+ }
+
+ @Test
+ public void shouldFailFastOnMissingNodeAccountIdIfSkippingNotExits() {
+ given(properties.getBooleanProperty("hedera.exitOnNodeStartupFailure")).willReturn(true);
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
+ given(ctx.nodeAccount()).willReturn(null);
+
+ // when:
+ subject.init(null, new NodeId(false, NODE_ID));
+
+ // then:
+ verify(systemExits, atLeastOnce()).fail(1);
+ }
+
+
@Test
public void doesntFailFastOnMissingNodeAccountIdIfSkippingExits() {
given(properties.getBooleanProperty("hedera.exitOnNodeStartupFailure")).willReturn(false);
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
given(ctx.nodeAccount()).willReturn(null);
// when:
@@ -196,10 +237,13 @@ public class ServicesMainTest {
verify(systemExits, never()).fail(1);
}
+
+
@Test
public void failsFastOnNonUtf8DefaultCharset() {
// setup:
subject.defaultCharset = () -> StandardCharsets.US_ASCII;
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
// when:
subject.init(null, new NodeId(false, NODE_ID));
@@ -216,13 +260,14 @@ public class ServicesMainTest {
subject.init(null, new NodeId(false, NODE_ID));
// then:
- verify(systemExits).fail(1);
+ verify(systemExits, atLeastOnce()).fail(1);
}
@Test
public void exitsOnApplicationPropertiesLoading() {
willThrow(IllegalStateException.class)
.given(systemFilesManager).loadApplicationProperties();
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
// when:
subject.init(null, new NodeId(false, NODE_ID));
@@ -235,6 +280,7 @@ public class ServicesMainTest {
public void exitsOnAddressBookCreationFailure() {
willThrow(IllegalStateException.class)
.given(systemFilesManager).createAddressBookIfMissing();
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
// when:
subject.init(null, new NodeId(false, NODE_ID));
@@ -247,6 +293,7 @@ public class ServicesMainTest {
public void exitsOnCreationFailure() throws Exception {
given(properties.getBooleanProperty("hedera.createSystemAccountsOnStartup")).willReturn(true);
given(properties.getBooleanProperty("hedera.exitOnNodeStartupFailure")).willReturn(true);
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
willThrow(Exception.class)
.given(systemAccountsCreator).createSystemAccounts(any(), any());
@@ -428,6 +475,7 @@ public class ServicesMainTest {
public void rethrowsAccountsCreationFailureAsIse() {
given(properties.getBooleanProperty("hedera.createSystemAccountsOnStartup")).willReturn(true);
given(ctx.systemAccountsCreator()).willReturn(null);
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
// when:
subject.init(null, new NodeId(false, NODE_ID));
@@ -453,6 +501,7 @@ public class ServicesMainTest {
given(properties.getStringProperty("hedera.accountsExportPath")).willReturn(PATH);
given(properties.getBooleanProperty("hedera.exportAccountsOnStartup")).willReturn(true);
given(ctx.accountsExporter()).willReturn(null);
+ given(ledgerValidator.hasExpectedTotalBalance(accounts)).willReturn(true);
// when:
subject.init(null, new NodeId(false, NODE_ID));
diff --git a/hedera-node/src/test/java/com/hedera/services/legacy/unit/CrptDelAcctValtionAndStartupBalCheckTest.java b/hedera-node/src/test/java/com/hedera/services/legacy/unit/CrptDelAcctValtionAndStartupBalCheckTest.java
index b268e9bf46..7400c9ec25 100644
--- a/hedera-node/src/test/java/com/hedera/services/legacy/unit/CrptDelAcctValtionAndStartupBalCheckTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/legacy/unit/CrptDelAcctValtionAndStartupBalCheckTest.java
@@ -56,6 +56,8 @@ import com.hedera.services.legacy.core.jproto.JKey;
import net.i2p.crypto.eddsa.EdDSAPrivateKey;
import net.i2p.crypto.eddsa.EdDSAPublicKey;
import net.i2p.crypto.eddsa.KeyPairGenerator;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.platform.runner.JUnitPlatform;
public class CrptDelAcctValtionAndStartupBalCheckTest {
@@ -102,27 +104,78 @@ public class CrptDelAcctValtionAndStartupBalCheckTest {
keys = Key.newBuilder().setKeyList(KeyList.newBuilder().addAllKeys(keyListp).build()).build();
}
- @Test
- public void testAccountMapBalanceForStartup() throws Exception {
- long account1Balance = 100000l;
- long account2Balance = 200000l;
- // Total Balance is less than 50B
- account1ID = RequestBuilder.getAccountIdBuild(1022l, 0l, 0l);
- account2ID = RequestBuilder.getAccountIdBuild(1023l, 0l, 0l);
- createAccount(account1ID, account1Balance, keys);
- createAccount(account2ID, account2Balance, keys);
- ResponseCodeEnum response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
- Assert.assertEquals(ResponseCodeEnum.TOTAL_LEDGER_BALANCE_INVALID, response);
+ @Test
+ public void testAccountMapBalanceForStartup() throws Exception {
+ long account1Balance = 100000l;
+ long account2Balance = 200000l;
+ // Total Balance is less than 50B
+ account1ID = RequestBuilder.getAccountIdBuild(1022l, 0l, 0l);
+ account2ID = RequestBuilder.getAccountIdBuild(1023l, 0l, 0l);
+ createAccount(account1ID, account1Balance, keys);
+ createAccount(account2ID, account2Balance, keys);
+ ResponseCodeEnum response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
+ Assert.assertEquals(ResponseCodeEnum.TOTAL_LEDGER_BALANCE_INVALID, response);
+
+ // Total balance is 50B
+ account3ID = RequestBuilder.getAccountIdBuild(1024l, 0l, 0l);
+ long account3Balance = 5000000000000000000l - (account2Balance + account1Balance);
+ createAccount(account3ID, account3Balance, keys);
+ response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
+ Assert.assertEquals(ResponseCodeEnum.OK, response);
+ }
- // Total balance is 50B
- account3ID = RequestBuilder.getAccountIdBuild(1024l, 0l, 0l);
- long account3Balance = 5000000000000000000l - (account2Balance + account1Balance);
- createAccount(account3ID, account3Balance, keys);
- response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
- Assert.assertEquals(ResponseCodeEnum.OK, response);
+ @Test
+ public void testInvalidAccountIdWithZeroAcctNumForStartup() throws Exception {
+ long account1Balance = 100000l;
+ // account id as 0.0.0
+ account1ID = RequestBuilder.getAccountIdBuild(0l, 0l, 0l);
+ createAccount(account1ID, account1Balance, keys);
+ ResponseCodeEnum response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
+ Assert.assertEquals(ResponseCodeEnum.INVALID_ACCOUNT_ID, response);
+ }
+ @Test
+ public void testInvalidAccountIdWithTooLargeAcctNumForStartup2() throws Exception {
+ long account1Balance = 100000l;
+ // account num as larger than allowef
+ account1ID = RequestBuilder.getAccountIdBuild(PropertiesLoader.getConfigAccountNum() + 1, 0l, 0l);
+ createAccount(account1ID, account1Balance, keys);
+ ResponseCodeEnum response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
+ Assert.assertEquals(ResponseCodeEnum.INVALID_ACCOUNT_ID, response);
+ }
- }
+ @Test
+ public void testInvalidAccountIdDueToInvalidRealmForStartup() throws Exception {
+ fcMap.clear();
+ long account1Balance = 100000l;
+ // account id has bad realm value
+ account1ID = RequestBuilder.getAccountIdBuild(1021l, -1l, 0l);
+ createAccount(account1ID, account1Balance, keys);
+
+ ResponseCodeEnum response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
+ Assert.assertEquals(ResponseCodeEnum.INVALID_ACCOUNT_ID, response);
+ }
+
+ @Test
+ public void testInvalidAccountIdDueToInvalidShardForStartup() throws Exception {
+ fcMap.clear();
+ long account1Balance = 100000l;
+ // account id has bad shard value
+ account1ID = RequestBuilder.getAccountIdBuild(1022l, 0l, 100l);
+ createAccount(account1ID, account1Balance, keys);
+
+ ResponseCodeEnum response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
+ Assert.assertEquals(ResponseCodeEnum.INVALID_ACCOUNT_ID, response);
+ }
+
+
+ @Test
+ public void testEmptyAccountMapBalanceForStartup() throws Exception {
+ fcMap.clear();
+ ResponseCodeEnum response = TransactionHandler.validateAccountIDAndTotalBalInMap(fcMap);
+ Assert.assertEquals(ResponseCodeEnum.OK, response);
+
+ }
private static Key PrivateKeyToKey(PrivateKey privateKey) {
byte[] pubKey = ((EdDSAPrivateKey) privateKey).getAbyte();
@@ -134,13 +187,14 @@ public class CrptDelAcctValtionAndStartupBalCheckTest {
byte[] pubKey = ((EdDSAPublicKey) pair.getPublic()).getAbyte();
String pubKeyHex = MiscUtils.commonsBytesToHex(pubKey);
pubKey2privKeyMap.put(pubKeyHex, pair.getPrivate());
- }
+ }
private void createAccount(AccountID payerAccount, long balance, Key key) throws Exception {
MerkleEntityId mk = new MerkleEntityId();
mk.setNum(payerAccount.getAccountNum());
- mk.setRealm(0);
+ mk.setRealm(payerAccount.getRealmNum());
+ mk.setShard(payerAccount.getShardNum());
MerkleAccount mv = new MerkleAccount();
mv.setBalance(balance);
JKey jkey = JKey.mapKey(key); | ['hedera-node/src/main/java/com/hedera/services/ServicesMain.java', 'hedera-node/src/main/java/com/hedera/services/legacy/handler/TransactionHandler.java', 'hedera-node/src/test/java/com/hedera/services/ServicesMainTest.java', 'hedera-node/src/test/java/com/hedera/services/legacy/unit/CrptDelAcctValtionAndStartupBalCheckTest.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 6,424,139 | 1,485,631 | 169,018 | 860 | 401 | 90 | 7 | 2 | 2,400 | 284 | 738 | 58 | 1 | 1 | 1970-01-01T00:26:35 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,469 | hashgraph/hedera-services/1138/1136 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1136 | https://github.com/hashgraph/hedera-services/pull/1138 | https://github.com/hashgraph/hedera-services/pull/1138 | 1 | closes | Node payments for a scheduled txn should go to node submitting the triggering txn | **Summary of the defect**
`ItemizableFeeCharging` uses the [node account in the `TransactionBody`](https://github.com/hashgraph/hedera-services/blob/master/hedera-node/src/main/java/com/hedera/services/fees/charging/ItemizableFeeCharging.java#L97) to identify where to transfer node payments.
This is always correct for a normal transaction, but for a scheduled transaction this account should be ignored; in fact, the payer for a triggered transaction **does not owe any node fees**.
**Suggested fix**
- In `ItemizableFeeCharging` only use `submittingMember` for the node account, since this is always identical to the node account from the transaction body when `NODE` fees are being paid.
- Add a `TxnFeeChargingPolicy.applyForTriggered()` method which discounts the node fee to 0. (This is totally analogous to `TxnFeeChargingPolicy.applyForDuplicate()` which discounts the service fee to 0.) | fa996f9cef6bacd3ad138203e013dcaad53103b2 | 5f05bf1dd1e8893906cec3517fbf1e01daa645ee | https://github.com/hashgraph/hedera-services/compare/fa996f9cef6bacd3ad138203e013dcaad53103b2...5f05bf1dd1e8893906cec3517fbf1e01daa645ee | diff --git a/hedera-node/src/main/java/com/hedera/services/fees/charging/ItemizableFeeCharging.java b/hedera-node/src/main/java/com/hedera/services/fees/charging/ItemizableFeeCharging.java
index 272dae19de..9884fb118a 100644
--- a/hedera-node/src/main/java/com/hedera/services/fees/charging/ItemizableFeeCharging.java
+++ b/hedera-node/src/main/java/com/hedera/services/fees/charging/ItemizableFeeCharging.java
@@ -52,13 +52,13 @@ import static com.hedera.services.fees.TxnFeeType.*;
public class ItemizableFeeCharging extends FieldSourcedFeeScreening implements TxnScopedFeeCharging {
public static EnumSet<TxnFeeType> NODE_FEE = EnumSet.of(NODE);
public static EnumSet<TxnFeeType> NETWORK_FEE = EnumSet.of(NETWORK);
+ public static EnumSet<TxnFeeType> SERVICE_FEE = EnumSet.of(SERVICE);
public static EnumSet<TxnFeeType> NETWORK_NODE_SERVICE_FEES = EnumSet.of(NETWORK, NODE, SERVICE);
private HederaLedger ledger;
private final GlobalDynamicProperties properties;
- AccountID node;
AccountID funding;
AccountID submittingNode;
EnumMap<TxnFeeType, Long> payerFeesCharged = new EnumMap<>(TxnFeeType.class);
@@ -94,7 +94,6 @@ public class ItemizableFeeCharging extends FieldSourcedFeeScreening implements T
public void resetFor(TxnAccessor accessor, AccountID submittingNode) {
super.resetFor(accessor);
- node = accessor.getTxn().getNodeAccountID();
funding = properties.fundingAccount();
this.submittingNode = submittingNode;
@@ -120,11 +119,10 @@ public class ItemizableFeeCharging extends FieldSourcedFeeScreening implements T
public TransferList itemizedFees() {
TransferList.Builder fees = TransferList.newBuilder();
- if (!submittingNodeFeesCharged.isEmpty()) {
+ AccountID payer = accessor.getPayer();
+ if (!payer.equals(submittingNode) && !submittingNodeFeesCharged.isEmpty()) {
includeIfCharged(NETWORK, submittingNode, submittingNodeFeesCharged, fees);
} else {
- AccountID payer = accessor.getPayer();
-
includeIfCharged(NETWORK, payer, payerFeesCharged, fees);
includeIfCharged(NODE, payer, payerFeesCharged, fees);
includeIfCharged(SERVICE, payer, payerFeesCharged, fees);
@@ -140,7 +138,7 @@ public class ItemizableFeeCharging extends FieldSourcedFeeScreening implements T
TransferList.Builder fees
) {
if (feesCharged.containsKey(fee)) {
- AccountID receiver = (fee == NODE) ? node : funding;
+ AccountID receiver = (fee == NODE) ? submittingNode : funding;
fees.addAllAccountAmounts(receiverFirst(source, receiver, feesCharged.get(fee)));
}
}
@@ -177,7 +175,7 @@ public class ItemizableFeeCharging extends FieldSourcedFeeScreening implements T
public void chargePayerUpTo(EnumSet<TxnFeeType> fees) {
pay(
fees,
- () -> chargeUpTo(accessor.getPayer(), node, NODE),
+ () -> chargeUpTo(accessor.getPayer(), submittingNode, NODE),
(fee) -> chargeUpTo(accessor.getPayer(), funding, fee));
}
@@ -185,7 +183,7 @@ public class ItemizableFeeCharging extends FieldSourcedFeeScreening implements T
public void chargeParticipant(AccountID participant, EnumSet<TxnFeeType> fees) {
pay(
fees,
- () -> charge(participant, node, NODE),
+ () -> charge(participant, submittingNode, NODE),
fee -> charge(participant, funding, fee));
}
diff --git a/hedera-node/src/main/java/com/hedera/services/fees/charging/TxnFeeChargingPolicy.java b/hedera-node/src/main/java/com/hedera/services/fees/charging/TxnFeeChargingPolicy.java
index 3b2758c922..428101e4b7 100644
--- a/hedera-node/src/main/java/com/hedera/services/fees/charging/TxnFeeChargingPolicy.java
+++ b/hedera-node/src/main/java/com/hedera/services/fees/charging/TxnFeeChargingPolicy.java
@@ -31,18 +31,20 @@ import static com.hedera.services.fees.TxnFeeType.SERVICE;
import static com.hedera.services.fees.charging.ItemizableFeeCharging.NETWORK_FEE;
import static com.hedera.services.fees.charging.ItemizableFeeCharging.NETWORK_NODE_SERVICE_FEES;
import static com.hedera.services.fees.charging.ItemizableFeeCharging.NODE_FEE;
+import static com.hedera.services.fees.charging.ItemizableFeeCharging.SERVICE_FEE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_PAYER_BALANCE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TX_FEE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK;
/**
* Provides the transaction fee-charging policy for the processing
- * logic. The policy offers three basic entry points:
+ * logic. The policy offers four basic entry points:
* <ol>
* <li>For a txn whose submitting node seemed to ignore due diligence
* (e.g. submitted a txn with an impermissible valid duration); and, </li>
* <li>For a txn that looks to have been submitted responsibly, but is
* a duplicate of a txn already submitted by a different node; and,</li>
+ * <li>For a triggered txn; and,</li>
* <li>For a txn that was submitted responsibly, and is believed unique.</li>
* </ol>
*
@@ -64,6 +66,27 @@ public class TxnFeeChargingPolicy {
return applyWithDiscount(charging, fee, NO_DISCOUNT);
}
+ /**
+ * Apply the fee charging policy to a txn that was submitted responsibly, but
+ * is a triggered txn rather than a parent txn requiring node precheck work.
+ *
+ * @param charging the charging facility to use
+ * @param fee the fee to charge
+ * @return the outcome of applying the policy
+ */
+ public ResponseCodeEnum applyForTriggered(ItemizableFeeCharging charging, FeeObject fee) {
+ charging.setFor(SERVICE, fee.getServiceFee());
+
+ if (!charging.isPayerWillingToCover(SERVICE_FEE)) {
+ return INSUFFICIENT_TX_FEE;
+ } else if (!charging.canPayerAfford(SERVICE_FEE)) {
+ return INSUFFICIENT_PAYER_BALANCE;
+ } else {
+ charging.chargePayer(SERVICE_FEE);
+ return OK;
+ }
+ }
+
/**
* Apply the fee charging policy to a txn that was submitted responsibly, but
* is a duplicate of a txn already submitted by a different node.
@@ -112,6 +135,7 @@ public class TxnFeeChargingPolicy {
ItemizableFeeCharging charging,
Consumer<ItemizableFeeCharging> discount
) {
+ discount.accept(charging);
if (!charging.isPayerWillingToCover(NETWORK_NODE_SERVICE_FEES)) {
penalizePayer(charging);
return INSUFFICIENT_TX_FEE;
@@ -119,7 +143,6 @@ public class TxnFeeChargingPolicy {
penalizePayer(charging);
return INSUFFICIENT_PAYER_BALANCE;
} else {
- discount.accept(charging);
charging.chargePayer(NETWORK_NODE_SERVICE_FEES);
return OK;
}
diff --git a/hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java b/hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java
index 197c37176f..1e6d67ab2d 100644
--- a/hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java
+++ b/hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java
@@ -188,7 +188,7 @@ public class AwareProcessLogic implements ProcessLogic {
FeeObject fee = ctx.fees().computeFee(accessor, ctx.txnCtx().activePayerKey(), ctx.currentView());
- var chargingOutcome = ctx.txnChargingPolicy().apply(ctx.charging(), fee);
+ var chargingOutcome = ctx.txnChargingPolicy().applyForTriggered(ctx.charging(), fee);
if (chargingOutcome != OK) {
ctx.txnCtx().setStatus(chargingOutcome);
return;
diff --git a/hedera-node/src/test/java/com/hedera/services/fees/charging/ItemizableFeeChargingTest.java b/hedera-node/src/test/java/com/hedera/services/fees/charging/ItemizableFeeChargingTest.java
index 5ac9969715..a13b51c8fd 100644
--- a/hedera-node/src/test/java/com/hedera/services/fees/charging/ItemizableFeeChargingTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/fees/charging/ItemizableFeeChargingTest.java
@@ -34,7 +34,6 @@ import org.junit.jupiter.api.Test;
import java.util.EnumMap;
import java.util.EnumSet;
-import java.util.Set;
import static com.hedera.services.fees.charging.ItemizableFeeCharging.NETWORK_NODE_SERVICE_FEES;
import static org.hamcrest.MatcherAssert.assertThat;
@@ -71,7 +70,7 @@ class ItemizableFeeChargingTest {
exemptions = mock(FeeExemptions.class);
properties = mock(GlobalDynamicProperties.class);
- given(txn.getNodeAccountID()).willReturn(givenNode);
+ given(txn.getNodeAccountID()).willThrow(IllegalStateException.class);
given(accessor.getTxn()).willReturn(txn);
given(accessor.getPayer()).willReturn(payer);
given(properties.fundingAccount()).willReturn(funding);
@@ -122,7 +121,7 @@ class ItemizableFeeChargingTest {
@Test
public void doesntRecordSelfPayments() {
givenKnownFeeAmounts();
- given(accessor.getPayer()).willReturn(givenNode);
+ given(accessor.getPayer()).willReturn(submittingNode);
// when:
subject.chargePayer(EnumSet.of(NODE));
@@ -213,7 +212,7 @@ class ItemizableFeeChargingTest {
public void itemizesWhenNodeIsPayer() {
givenKnownFeeAmounts();
given(ledger.getBalance(any())).willReturn(Long.MAX_VALUE);
- given(accessor.getPayer()).willReturn(givenNode);
+ given(accessor.getPayer()).willReturn(submittingNode);
// when:
subject.chargePayer(NETWORK_NODE_SERVICE_FEES);
@@ -225,9 +224,9 @@ class ItemizableFeeChargingTest {
itemizedFees.getAccountAmountsList(),
contains(
aa(funding, network),
- aa(givenNode, -network),
+ aa(submittingNode, -network),
aa(funding, service),
- aa(givenNode, -service)));
+ aa(submittingNode, -service)));
}
@Test
@@ -246,7 +245,7 @@ class ItemizableFeeChargingTest {
contains(
aa(funding, network),
aa(payer, -network),
- aa(givenNode, node),
+ aa(submittingNode, node),
aa(payer, -node),
aa(funding, service),
aa(payer, -service)));
@@ -266,7 +265,7 @@ class ItemizableFeeChargingTest {
// then:
verify(ledger).doTransfer(participant, funding, network);
verify(ledger).doTransfer(participant, funding, service);
- verify(ledger).doTransfer(participant, givenNode, node);
+ verify(ledger).doTransfer(participant, submittingNode, node);
// and:
assertTrue(subject.submittingNodeFeesCharged.isEmpty());
assertTrue(subject.payerFeesCharged.isEmpty());
@@ -282,7 +281,7 @@ class ItemizableFeeChargingTest {
// then:
verify(ledger).doTransfer(payer, funding, network);
verify(ledger).doTransfer(payer, funding, service);
- verify(ledger).doTransfer(payer, givenNode, node);
+ verify(ledger).doTransfer(payer, submittingNode, node);
// and:
assertEquals(network, subject.payerFeesCharged.get(NETWORK).longValue());
assertEquals(service, subject.payerFeesCharged.get(SERVICE).longValue());
@@ -301,7 +300,7 @@ class ItemizableFeeChargingTest {
// then:
verify(ledger).doTransfer(payer, funding, network);
- verify(ledger).doTransfer(payer, givenNode, node / 2);
+ verify(ledger).doTransfer(payer, submittingNode, node / 2);
// and:
assertEquals(network, subject.payerFeesCharged.get(NETWORK).longValue());
assertEquals(node / 2, subject.payerFeesCharged.get(NODE).longValue());
diff --git a/hedera-node/src/test/java/com/hedera/services/fees/charging/TxnFeeChargingPolicyTest.java b/hedera-node/src/test/java/com/hedera/services/fees/charging/TxnFeeChargingPolicyTest.java
index 454eb3c55a..c75d76f4e3 100644
--- a/hedera-node/src/test/java/com/hedera/services/fees/charging/TxnFeeChargingPolicyTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/fees/charging/TxnFeeChargingPolicyTest.java
@@ -39,6 +39,7 @@ import static com.hedera.services.fees.TxnFeeType.SERVICE;
import static com.hedera.services.fees.charging.ItemizableFeeCharging.NETWORK_FEE;
import static com.hedera.services.fees.charging.ItemizableFeeCharging.NETWORK_NODE_SERVICE_FEES;
import static com.hedera.services.fees.charging.ItemizableFeeCharging.NODE_FEE;
+import static com.hedera.services.fees.charging.ItemizableFeeCharging.SERVICE_FEE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_PAYER_BALANCE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TX_FEE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK;
@@ -100,7 +101,44 @@ class TxnFeeChargingPolicyTest {
}
@Test
- public void liveFireDiscountWorks() {
+ public void liveFireWorksForTriggered() {
+ // setup:
+ TransactionBody txn = mock(TransactionBody.class);
+ AccountID submittingNode = IdUtils.asAccount("0.0.3");
+ AccountID payer = IdUtils.asAccount("0.0.1001");
+ AccountID funding = IdUtils.asAccount("0.0.98");
+ HederaLedger ledger = mock(HederaLedger.class);
+ GlobalDynamicProperties properties = mock(GlobalDynamicProperties.class);
+ SignedTxnAccessor accessor = mock(SignedTxnAccessor.class);
+ charging = new ItemizableFeeCharging(ledger, new NoExemptions(), properties);
+
+ given(ledger.getBalance(any())).willReturn(Long.MAX_VALUE);
+ given(properties.fundingAccount()).willReturn(funding);
+ given(txn.getTransactionFee()).willReturn(10L);
+ given(accessor.getTxn()).willReturn(txn);
+
+ given(accessor.getPayer()).willReturn(payer);
+
+ // when:
+ charging.resetFor(accessor, submittingNode);
+ ResponseCodeEnum outcome = subject.applyForTriggered(charging, fee);
+
+ // then:
+ verify(ledger).doTransfer(payer, funding, service);
+ verify(ledger, never()).doTransfer(
+ argThat(payer::equals),
+ argThat(funding::equals),
+ longThat(l -> l == network));
+ verify(ledger, never()).doTransfer(
+ argThat(payer::equals),
+ argThat(submittingNode::equals),
+ longThat(l -> l == node));
+ // and:
+ assertEquals(OK, outcome);
+ }
+
+ @Test
+ public void liveFireDiscountWorksForDuplicate() {
// setup:
TransactionBody txn = mock(TransactionBody.class);
AccountID submittingNode = IdUtils.asAccount("0.0.3");
@@ -207,6 +245,38 @@ class TxnFeeChargingPolicyTest {
assertEquals(INSUFFICIENT_TX_FEE, outcome);
}
+ @Test
+ public void requiresWillingToPayServiceWhenTriggeredTxn() {
+ given(charging.isPayerWillingToCover(SERVICE_FEE)).willReturn(false);
+
+ // when:
+ ResponseCodeEnum outcome = subject.applyForTriggered(charging, fee);
+
+ // then:
+ verify(charging).setFor(SERVICE, service);
+ // and:
+ verify(charging).isPayerWillingToCover(SERVICE_FEE);
+ // and:
+ assertEquals(INSUFFICIENT_TX_FEE, outcome);
+ }
+
+ @Test
+ public void requiresAbleToPayServiceWhenTriggeredTxn() {
+ given(charging.isPayerWillingToCover(SERVICE_FEE)).willReturn(true);
+ given(charging.canPayerAfford(SERVICE_FEE)).willReturn(false);
+
+ // when:
+ ResponseCodeEnum outcome = subject.applyForTriggered(charging, fee);
+
+ // then:
+ verify(charging).setFor(SERVICE, service);
+ // and:
+ verify(charging).isPayerWillingToCover(SERVICE_FEE);
+ verify(charging).canPayerAfford(SERVICE_FEE);
+ // and:
+ assertEquals(INSUFFICIENT_PAYER_BALANCE, outcome);
+ }
+
@Test
public void chargesNodePenaltyForPayerUnableToPayNetwork() {
given(charging.isPayerWillingToCover(NETWORK_FEE)).willReturn(true);
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecOperation.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecOperation.java
index e7cac897e1..a9ff3c8742 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecOperation.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecOperation.java
@@ -95,6 +95,7 @@ public abstract class HapiSpecOperation {
protected boolean omitTxnId = false;
protected boolean loggingOff = false;
protected boolean suppressStats = false;
+ protected boolean omitNodeAccount = false;
protected boolean verboseLoggingOn = false;
protected boolean shouldRegisterTxn = false;
protected boolean useDefaultTxnAsCostAnswerPayment = false;
@@ -262,7 +263,11 @@ public abstract class HapiSpecOperation {
builder.getTransactionIDBuilder().setNonce(ByteString.copyFrom(nonce));
}
- node.ifPresent(builder::setNodeAccountID);
+ if (omitNodeAccount) {
+ builder.clearNodeAccountID();
+ } else {
+ node.ifPresent(builder::setNodeAccountID);
+ }
validDurationSecs.ifPresent(s -> {
builder.setTransactionValidDuration(Duration.newBuilder().setSeconds(s).build());
});
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/assertions/TransferListAsserts.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/assertions/TransferListAsserts.java
index 6d221d422f..804e9c049d 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/assertions/TransferListAsserts.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/assertions/TransferListAsserts.java
@@ -9,9 +9,9 @@ package com.hedera.services.bdd.spec.assertions;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -43,24 +43,34 @@ import static com.hedera.services.bdd.spec.transactions.TxnUtils.readableTransfe
import static java.util.stream.Collectors.toSet;
public class TransferListAsserts extends BaseErroringAssertsProvider<TransferList> {
+ public static TransferListAsserts exactParticipants(Function<HapiApiSpec, List<AccountID>> provider) {
+ return new ExactParticipantsAssert(provider);
+ }
+
public static TransferListAsserts including(Function<HapiApiSpec, TransferList>... providers) {
return new ExplicitTransferAsserts(Arrays.asList(providers));
}
+
public static TransferListAsserts includingDeduction(LongSupplier from, long amount) {
return new DeductionAsserts(from, amount);
}
+
public static TransferListAsserts includingDeduction(String desc, String payer) {
return new QualifyingDeductionAssert(desc, payer);
}
+
public static TransferListAsserts atLeastOneTransfer() {
return new NonEmptyTransferAsserts();
}
+
public static TransferListAsserts missingPayments(Function<HapiApiSpec, Map.Entry<AccountID, Long>>... providers) {
return new MissingPaymentAsserts(providers);
}
+
public static Function<HapiApiSpec, Map.Entry<AccountID, Long>> to(String account, Long amount) {
return spec -> new AbstractMap.SimpleEntry<>(spec.registry().getAccountID(account), amount);
}
+
public static Function<HapiApiSpec, Map.Entry<AccountID, Long>> from(String account, Long amount) {
return spec -> new AbstractMap.SimpleEntry<>(spec.registry().getAccountID(account), -1 * amount);
}
@@ -77,10 +87,10 @@ public class TransferListAsserts extends BaseErroringAssertsProvider<TransferLis
class MissingPaymentAsserts extends TransferListAsserts {
public MissingPaymentAsserts(Function<HapiApiSpec, Map.Entry<AccountID, Long>>... providers) {
registerProvider((spec, o) -> {
- TransferList actual = (TransferList)o;
+ TransferList actual = (TransferList) o;
Set<String> missing = Stream.of(providers).map(provider -> asSig(provider.apply(spec))).collect(toSet());
Set<String> nonAbsent = new HashSet<>();
- actual.getAccountAmountsList().stream().forEach(entry -> {
+ actual.getAccountAmountsList().stream().forEach(entry -> {
String sig = asSig(new AbstractMap.SimpleEntry<>(entry.getAccountID(), entry.getAmount()));
if (missing.contains(sig)) {
nonAbsent.add(sig);
@@ -100,12 +110,27 @@ class MissingPaymentAsserts extends TransferListAsserts {
}
}
+class ExactParticipantsAssert extends TransferListAsserts {
+ public ExactParticipantsAssert(Function<HapiApiSpec, List<AccountID>> provider) {
+ registerProvider((spec, o) -> {
+ List<AccountID> expectedParticipants = provider.apply(spec);
+ TransferList actual = (TransferList) o;
+ Assert.assertEquals("Wrong number of participants!",
+ expectedParticipants.size(),
+ actual.getAccountAmountsCount());
+ for (int i = 0, n = expectedParticipants.size(); i < n; i++) {
+ Assert.assertEquals(expectedParticipants.get(i), actual.getAccountAmounts(i).getAccountID());
+ }
+ });
+ }
+}
+
class ExplicitTransferAsserts extends TransferListAsserts {
public ExplicitTransferAsserts(List<Function<HapiApiSpec, TransferList>> providers) {
providers.stream().forEach(provider -> {
registerProvider((spec, o) -> {
TransferList expected = provider.apply(spec);
- assertInclusion(expected, (TransferList)o);
+ assertInclusion(expected, (TransferList) o);
});
});
}
@@ -114,7 +139,7 @@ class ExplicitTransferAsserts extends TransferListAsserts {
class QualifyingDeductionAssert extends TransferListAsserts {
public QualifyingDeductionAssert(String desc, String payer) {
registerProvider((spec, o) -> {
- var transfers = (TransferList)o;
+ var transfers = (TransferList) o;
var hasQualifying = getDeduction(transfers, asId(payer, spec)).isPresent();
if (!hasQualifying) {
Assert.fail("No qualifying " + desc + " from " + payer + " in " + readableTransferList(transfers));
@@ -126,7 +151,7 @@ class QualifyingDeductionAssert extends TransferListAsserts {
class NonEmptyTransferAsserts extends TransferListAsserts {
public NonEmptyTransferAsserts() {
registerProvider((spec, o) -> {
- TransferList transfers = (TransferList)o;
+ TransferList transfers = (TransferList) o;
Assert.assertTrue("Transfer list cannot be empty!", !transfers.getAccountAmountsList().isEmpty());
});
}
@@ -135,7 +160,7 @@ class NonEmptyTransferAsserts extends TransferListAsserts {
class DeductionAsserts extends TransferListAsserts {
public DeductionAsserts(LongSupplier from, long amount) {
registerProvider((sepc, o) -> {
- TransferList transfers = (TransferList)o;
+ TransferList transfers = (TransferList) o;
long num = from.getAsLong();
Assert.assertTrue(
String.format("No deduction of -%d tinyBars from 0.0.%d detected!", amount, num),
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/meta/HapiGetTxnRecord.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/meta/HapiGetTxnRecord.java
index 0fdb9785cd..252e9fe366 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/meta/HapiGetTxnRecord.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/meta/HapiGetTxnRecord.java
@@ -27,10 +27,7 @@ import com.hedera.services.bdd.spec.assertions.ErroringAsserts;
import com.hedera.services.bdd.spec.assertions.ErroringAssertsProvider;
import com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts;
import com.hedera.services.bdd.spec.queries.HapiQueryOp;
-import com.hedera.services.bdd.spec.queries.QueryVerbs;
import com.hedera.services.bdd.spec.transactions.TxnUtils;
-import com.hedera.services.bdd.spec.transactions.schedule.HapiScheduleCreate;
-import com.hedera.services.bdd.spec.utilops.CustomSpecAssert;
import com.hedera.services.legacy.proto.utils.CommonUtils;
import com.hederahashgraph.api.proto.java.HederaFunctionality;
import com.hederahashgraph.api.proto.java.Query;
@@ -67,6 +64,7 @@ public class HapiGetTxnRecord extends HapiQueryOp<HapiGetTxnRecord> {
boolean useDefaultTxnId = false;
boolean requestDuplicates = false;
boolean shouldBeTransferFree = false;
+ boolean assertOnlyPriority = false;
boolean assertNothingAboutHashes = false;
boolean lookupScheduledFromRegistryId = false;
Optional<TransactionID> explicitTxnId = Optional.empty();
@@ -102,6 +100,11 @@ public class HapiGetTxnRecord extends HapiQueryOp<HapiGetTxnRecord> {
return this;
}
+ public HapiGetTxnRecord assertingOnlyPriority() {
+ assertOnlyPriority = true;
+ return this;
+ }
+
public HapiGetTxnRecord scheduledBy(String creation) {
scheduled = true;
creationName = Optional.of(creation);
@@ -242,11 +245,11 @@ public class HapiGetTxnRecord extends HapiQueryOp<HapiGetTxnRecord> {
if (assertNothing) {
return;
}
- if (scheduled) {
- return;
- }
TransactionRecord actualRecord = response.getTransactionGetRecord().getTransactionRecord();
assertPriority(spec, actualRecord);
+ if (scheduled || assertOnlyPriority) {
+ return;
+ }
assertDuplicates(spec);
if (!assertNothingAboutHashes) {
assertTransactionHash(spec, actualRecord);
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java
index 9bfeddeb70..67dde859ba 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java
@@ -695,6 +695,11 @@ public abstract class HapiTxnOp<T extends HapiTxnOp<T>> extends HapiSpecOperatio
return self();
}
+ public T sansNodeAccount() {
+ omitNodeAccount = true;
+ return self();
+ }
+
public TransactionReceipt getLastReceipt() {
return lastReceipt;
}
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleCreate.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleCreate.java
index 8c1c042231..5b42d973cb 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleCreate.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleCreate.java
@@ -23,12 +23,10 @@ package com.hedera.services.bdd.spec.transactions.schedule;
import com.google.common.base.MoreObjects;
import com.google.protobuf.ByteString;
import com.hedera.services.bdd.spec.HapiApiSpec;
-import com.hedera.services.bdd.spec.HapiPropertySource;
import com.hedera.services.bdd.spec.HapiSpecSetup;
import com.hedera.services.bdd.spec.keys.TrieSigMapGenerator;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hedera.services.bdd.spec.transactions.TxnUtils;
-import com.hedera.services.legacy.proto.utils.CommonUtils;
import com.hedera.services.usage.schedule.ScheduleCreateUsage;
import com.hederahashgraph.api.proto.java.FeeData;
import com.hederahashgraph.api.proto.java.HederaFunctionality;
@@ -37,7 +35,6 @@ import com.hederahashgraph.api.proto.java.ScheduleCreateTransactionBody;
import com.hederahashgraph.api.proto.java.SignatureMap;
import com.hederahashgraph.api.proto.java.Transaction;
import com.hederahashgraph.api.proto.java.TransactionBody;
-import com.hederahashgraph.api.proto.java.TransactionID;
import com.hederahashgraph.api.proto.java.TransactionResponse;
import com.hederahashgraph.fee.SigValueObj;
import org.apache.logging.log4j.LogManager;
@@ -82,7 +79,7 @@ public class HapiScheduleCreate<T extends HapiTxnOp<T>> extends HapiTxnOp<HapiSc
public HapiScheduleCreate(String scheduled, HapiTxnOp<T> txn) {
this.entity = scheduled;
- this.scheduled = txn.withLegacyProtoStructure().sansTxnId();
+ this.scheduled = txn.withLegacyProtoStructure().sansTxnId().sansNodeAccount();
}
public HapiScheduleCreate<T> savingExpectedScheduledTxnId() {
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java
index b5e78bfac5..b49ad1f9d4 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java
@@ -27,11 +27,13 @@ import com.hederahashgraph.api.proto.java.TransactionID;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import static com.hedera.services.bdd.spec.HapiApiSpec.defaultHapiSpec;
import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith;
+import static com.hedera.services.bdd.spec.assertions.TransferListAsserts.exactParticipants;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getReceipt;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTopicInfo;
@@ -50,6 +52,8 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.usableTxnIdNamed;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.validateChargedUsdWithin;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_PAYER_BALANCE;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TX_FEE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TRANSACTION_ID_FIELD_NOT_ALLOWED;
@@ -75,6 +79,8 @@ public class ScheduleRecordSpecs extends HapiApiSuite {
suiteCleanup(),
canonicalScheduleOpsHaveExpectedUsdFees(),
canScheduleChunkedMessages(),
+ noFeesChargedIfTriggeredPayerIsInsolvent(),
+ noFeesChargedIfTriggeredPayerIsUnwilling(),
}
);
}
@@ -134,6 +140,51 @@ public class ScheduleRecordSpecs extends HapiApiSuite {
);
}
+ public HapiApiSpec noFeesChargedIfTriggeredPayerIsUnwilling() {
+ return defaultHapiSpec("NoFeesChargedIfTriggeredPayerIsUnwilling")
+ .given(
+ cryptoCreate("unwillingPayer")
+ ).when(
+ scheduleCreate("schedule",
+ cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1))
+ .fee(1L)
+ .signedBy(GENESIS, "unwillingPayer")
+ ).inheritingScheduledSigs()
+ .via("simpleXferSchedule")
+ .designatingPayer("unwillingPayer")
+ .savingExpectedScheduledTxnId()
+ ).then(
+ getTxnRecord("simpleXferSchedule")
+ .scheduledBy("schedule")
+ .hasPriority(recordWith()
+ .transfers(exactParticipants(ignore -> Collections.emptyList()))
+ .status(INSUFFICIENT_TX_FEE))
+
+ );
+ }
+
+ public HapiApiSpec noFeesChargedIfTriggeredPayerIsInsolvent() {
+ return defaultHapiSpec("NoFeesChargedIfTriggeredPayerIsInsolvent")
+ .given(
+ cryptoCreate("insolventPayer").balance(0L)
+ ).when(
+ scheduleCreate("schedule",
+ cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1))
+ .signedBy(GENESIS, "insolventPayer")
+ ).inheritingScheduledSigs()
+ .via("simpleXferSchedule")
+ .designatingPayer("insolventPayer")
+ .savingExpectedScheduledTxnId()
+ ).then(
+ getTxnRecord("simpleXferSchedule")
+ .scheduledBy("schedule")
+ .hasPriority(recordWith()
+ .transfers(exactParticipants(ignore -> Collections.emptyList()))
+ .status(INSUFFICIENT_PAYER_BALANCE))
+
+ );
+ }
+
public HapiApiSpec canScheduleChunkedMessages() {
String ofGeneralInterest = "Scotch";
AtomicReference<TransactionID> initialTxnId = new AtomicReference<>();
@@ -157,18 +208,39 @@ public class ScheduleRecordSpecs extends HapiApiSuite {
.signedBy("payingSender")
)
.txnId("begin")
+ .logged()
.signedBy("payingSender")
.inheritingScheduledSigs()
),
- getTxnRecord("begin").scheduled().hasPriority(recordWith().status(SUCCESS))
+ getTxnRecord("begin").hasPriority(recordWith()
+ .status(SUCCESS)
+ .transfers(exactParticipants(spec -> List.of(
+ spec.setup().defaultNode(),
+ spec.setup().fundingAccount(),
+ spec.registry().getAccountID("payingSender")
+ )))).assertingOnlyPriority().logged(),
+ getTxnRecord("begin").scheduled().hasPriority(recordWith()
+ .status(SUCCESS)
+ .transfers(exactParticipants(spec -> List.of(
+ spec.setup().fundingAccount(),
+ spec.registry().getAccountID("payingSender")
+ )))).logged()
).then(
scheduleCreate("secondChunk",
submitMessageTo(ofGeneralInterest)
.chunkInfo(3, 2, "payingSender")
.signedBy("payingSender")
)
+ .via("end")
+ .logged()
.payingWith("payingSender")
.inheritingScheduledSigs(),
+ getTxnRecord("end").scheduled().hasPriority(recordWith()
+ .status(SUCCESS)
+ .transfers(exactParticipants(spec -> List.of(
+ spec.setup().fundingAccount(),
+ spec.registry().getAccountID("payingSender")
+ )))).logged(),
getTopicInfo(ofGeneralInterest).logged().hasSeqNo(2L)
);
} | ['hedera-node/src/main/java/com/hedera/services/fees/charging/ItemizableFeeCharging.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecOperation.java', 'hedera-node/src/main/java/com/hedera/services/legacy/services/state/AwareProcessLogic.java', 'hedera-node/src/main/java/com/hedera/services/fees/charging/TxnFeeChargingPolicy.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleCreate.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/queries/meta/HapiGetTxnRecord.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java', 'hedera-node/src/test/java/com/hedera/services/fees/charging/TxnFeeChargingPolicyTest.java', 'hedera-node/src/test/java/com/hedera/services/fees/charging/ItemizableFeeChargingTest.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/assertions/TransferListAsserts.java'] | {'.java': 11} | 11 | 11 | 0 | 0 | 11 | 6,092,794 | 1,461,651 | 171,750 | 1,071 | 7,255 | 1,800 | 190 | 9 | 905 | 107 | 209 | 8 | 1 | 0 | 1970-01-01T00:26:54 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,470 | hashgraph/hedera-services/1129/1128 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1128 | https://github.com/hashgraph/hedera-services/pull/1129 | https://github.com/hashgraph/hedera-services/pull/1129 | 1 | closes | Fix HAPIClientValidator for File-Restart-Performance-Size1K_4K-28-16m and Contract-Basic-Performance-24-16m for daily perf tests | **Summary of the defect**
For these two perf regression tests, HapiClientValidator fails on UmbrellaRedux for unclear reason.
**How to reproduce (if possible)**
Please refer to https://hedera-hashgraph.slack.com/archives/C018Y4E6ADT/p1614588123017800
They also failed for local run.
**Service logs (if applicable)**
```
...
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| 7332b4f2784a99f6368b6bb7dac7477a94dcd2d2 | bda98c3d96f8bf2b323f513de6b82ed6599342b1 | https://github.com/hashgraph/hedera-services/compare/7332b4f2784a99f6368b6bb7dac7477a94dcd2d2...bda98c3d96f8bf2b323f513de6b82ed6599342b1 | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/RegressionProviderFactory.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/RegressionProviderFactory.java
index f86686f949..1f625d327e 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/RegressionProviderFactory.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/RegressionProviderFactory.java
@@ -71,7 +71,6 @@ import com.hedera.services.bdd.spec.props.JutilPropertySource;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ContractID;
import com.hederahashgraph.api.proto.java.FileID;
-import com.hederahashgraph.api.proto.java.HederaFunctionality;
import com.hederahashgraph.api.proto.java.Key;
import com.hederahashgraph.api.proto.java.TokenID;
import com.hederahashgraph.api.proto.java.TopicID;
@@ -119,10 +118,10 @@ public class RegressionProviderFactory {
/* ----- META ----- */
.withOp(
new RandomRecord(spec.txns()),
- props.getInteger("randomRecord.bias"))
+ intPropOrElse("randomRecord.bias", 0, props))
.withOp(
new RandomReceipt(spec.txns()),
- props.getInteger("randomReceipt.bias"))
+ intPropOrElse("randomReceipt.bias", 0, props))
/* ----- CRYPTO ----- */
.withOp(
new RandomAccount(keys, allAccounts)
@@ -133,7 +132,7 @@ public class RegressionProviderFactory {
"randomTransfer.numStableAccounts",
RandomTransfer.DEFAULT_NUM_STABLE_ACCOUNTS,
props)),
- props.getInteger("randomAccount.bias"))
+ intPropOrElse("randomAccount.bias", 0, props))
.withOp(
new RandomTransfer(allAccounts)
.numStableAccounts(
@@ -146,19 +145,19 @@ public class RegressionProviderFactory {
"randomTransfer.recordProbability",
RandomTransfer.DEFAULT_RECORD_PROBABILITY,
props)),
- props.getInteger("randomTransfer.bias"))
+ intPropOrElse("randomTransfer.bias", 0, props))
.withOp(
new RandomAccountUpdate(keys, unstableAccounts),
- props.getInteger("randomAccountUpdate.bias"))
+ intPropOrElse("randomAccountUpdate.bias", 0, props))
.withOp(
new RandomAccountDeletion(unstableAccounts),
- props.getInteger("randomAccountDeletion.bias"))
+ intPropOrElse("randomAccountDeletion.bias", 0, props))
.withOp(
new RandomAccountInfo(allAccounts),
- props.getInteger("randomAccountInfo.bias"))
+ intPropOrElse("randomAccountInfo.bias", 0, props))
.withOp(
new RandomAccountRecords(allAccounts),
- props.getInteger("randomAccountRecords.bias"))
+ intPropOrElse("randomAccountRecords.bias", 0, props))
/* ---- CONSENSUS ---- */
.withOp(
new RandomTopicCreation(keys, allTopics)
@@ -212,67 +211,67 @@ public class RegressionProviderFactory {
/* ---- TOKEN ---- */
.withOp(
new RandomToken(keys, tokens, allAccounts),
- props.getInteger("randomToken.bias"))
+ intPropOrElse("randomToken.bias", 0, props))
.withOp(
new RandomTokenAssociation(tokens, allAccounts, tokenRels)
.ceiling(intPropOrElse(
"randomTokenAssociation.ceilingNum",
RandomTokenAssociation.DEFAULT_CEILING_NUM,
props)),
- props.getInteger("randomTokenAssociation.bias"))
+ intPropOrElse("randomTokenAssociation.bias", 0, props))
.withOp(
new RandomTokenDissociation(tokenRels),
- props.getInteger("randomTokenDissociation.bias"))
+ intPropOrElse("randomTokenDissociation.bias", 0, props))
.withOp(
new RandomTokenDeletion(tokens),
- props.getInteger("randomTokenDeletion.bias"))
+ intPropOrElse("randomTokenDeletion.bias", 0, props))
.withOp(
new RandomTokenTransfer(tokenRels),
- props.getInteger("randomTokenTransfer.bias"))
+ intPropOrElse("randomTokenTransfer.bias", 0, props))
.withOp(
new RandomTokenFreeze(tokenRels),
- props.getInteger("randomTokenFreeze.bias"))
+ intPropOrElse("randomTokenFreeze.bias", 0, props))
.withOp(
new RandomTokenUnfreeze(tokenRels),
- props.getInteger("randomTokenUnfreeze.bias"))
+ intPropOrElse("randomTokenUnfreeze.bias", 0, props))
.withOp(
new RandomTokenKycGrant(tokenRels),
- props.getInteger("randomTokenKycGrant.bias"))
+ intPropOrElse("randomTokenKycGrant.bias", 0, props))
.withOp(
new RandomTokenKycRevoke(tokenRels),
- props.getInteger("randomTokenKycRevoke.bias"))
+ intPropOrElse("randomTokenKycRevoke.bias", 0, props))
.withOp(
new RandomTokenMint(tokens),
- props.getInteger("randomTokenMint.bias"))
+ intPropOrElse("randomTokenMint.bias", 0, props))
.withOp(
new RandomTokenBurn(tokens),
- props.getInteger("randomTokenBurn.bias"))
+ intPropOrElse("randomTokenBurn.bias", 0, props))
.withOp(
new RandomTokenUpdate(keys, tokens, allAccounts),
- props.getInteger("randomTokenUpdate.bias"))
+ intPropOrElse("randomTokenUpdate.bias", 0, props))
.withOp(
new RandomTokenAccountWipe(tokenRels),
- props.getInteger("randomTokenAccountWipe.bias"))
+ intPropOrElse("randomTokenAccountWipe.bias", 0, props))
.withOp(
new RandomTokenInfo(tokens),
- props.getInteger("randomTokenInfo.bias"))
+ intPropOrElse("randomTokenInfo.bias", 0, props))
/* ---- CONTRACT ---- */
.withOp(
new RandomCall(calls),
- props.getInteger("randomCall.bias"))
+ intPropOrElse("randomCall.bias", 0, props))
.withOp(
new RandomCallLocal(localCalls),
- props.getInteger("randomCallLocal.bias"))
+ intPropOrElse("randomCallLocal.bias", 0, props))
.withOp(
new RandomContractDeletion(allAccounts, contracts),
- props.getInteger("randomContractDeletion.bias"))
+ intPropOrElse("randomContractDeletion.bias", 0, props))
.withOp(
new RandomContract(keys, contracts)
.ceiling(intPropOrElse(
"randomContract.ceilingNum",
RandomContract.DEFAULT_CEILING_NUM,
props)),
- props.getInteger("randomContract.bias"));
+ intPropOrElse("randomContract.bias", 0, props));
};
}
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java
index a687331100..88b64d5695 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java
@@ -35,6 +35,7 @@ import java.util.concurrent.atomic.AtomicReference;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runWithProvider;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sleepFor;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext;
import static com.hedera.services.bdd.suites.regression.RegressionProviderFactory.factoryFrom;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
@@ -84,11 +85,13 @@ public class UmbrellaRedux extends HapiApiSuite {
getTxnRecord("createUniquePayer").logged()
).then(
withOpContext((spec, opLog) -> configureFromCi(spec)),
- runWithProvider(factoryFrom(props::get))
+ sourcing( () -> runWithProvider(factoryFrom(props::get))
.lasting(duration::get, unit::get)
.maxOpsPerSec(maxOpsPerSec::get)
.maxPendingOps(maxPendingOps::get)
.backoffSleepSecs(backoffSleepSecs::get)
+
+ )
);
}
@@ -115,6 +118,9 @@ public class UmbrellaRedux extends HapiApiSuite {
if (ciProps.has("statusTimeoutSecs")) {
statusTimeoutSecs.set(ciProps.getInteger("statusTimeoutSecs"));
}
+ if (ciProps.has("secondsWaitingServerUp")) {
+ statusTimeoutSecs.set(ciProps.getInteger("secondsWaitingServerUp"));
+ }
}
@Override | ['test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/regression/RegressionProviderFactory.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 6,092,671 | 1,460,976 | 171,721 | 1,071 | 3,273 | 736 | 61 | 2 | 600 | 77 | 173 | 21 | 1 | 1 | 1970-01-01T00:26:54 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,473 | hashgraph/hedera-services/1055/1054 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1054 | https://github.com/hashgraph/hedera-services/pull/1055 | https://github.com/hashgraph/hedera-services/pull/1055 | 1 | closes | Fix INSUFFICIENT_PAYER_BALANCE error in nightly tests | **Summary of the defect**
Most of the nightly regression tests are failing with `INSUFFICIENT_PAYER_BALANCE` error on Feb 3rd.
Failing Crypto-Restart test link : https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1612420086307000
Possible Resolution:
This might be solved by using `long initialBalance = 5_000_000_000_000L;` in `CryptoCreateForSuiteRunner` and use different initialBalance for performance tests in LoadTest `public static OptionalLong initialBalance = OptionalLong.of(900_000_000_000L);` | ab151f09f95d47bc43a11099557507a3a9c46241 | e864c8d85351cb4bbcbb1d5e79d6f7dc3f959614 | https://github.com/hashgraph/hedera-services/compare/ab151f09f95d47bc43a11099557507a3a9c46241...e864c8d85351cb4bbcbb1d5e79d6f7dc3f959614 | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/utilops/LoadTest.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/utilops/LoadTest.java
index 9b2c60b105..87edc6957c 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/utilops/LoadTest.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/utilops/LoadTest.java
@@ -31,7 +31,6 @@ import java.util.List;
import java.util.OptionalDouble;
import java.util.OptionalInt;
import java.util.OptionalLong;
-import java.util.Optional;
import java.util.function.Supplier;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runLoadTest;
@@ -45,8 +44,8 @@ public class LoadTest extends HapiApiSuite {
public static OptionalInt threadNumber = OptionalInt.empty();
public static OptionalInt hcsSubmitMessage = OptionalInt.empty();
public static OptionalInt hcsSubmitMessageSizeVar = OptionalInt.empty();
- /** initial balance of payer account used for paying for performance test transactions */
- public static OptionalLong initialBalance = OptionalLong.of(90_000_000_000_000L);
+ /** initial balance of account used as sender for performance test transactions */
+ public static OptionalLong initialBalance = OptionalLong.of(ONE_HBAR * 1_000_000L);
public static OptionalInt totalTestAccounts = OptionalInt.empty();
public static OptionalInt totalTestTopics = OptionalInt.empty();
public static OptionalInt totalTestTokens = OptionalInt.empty();
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoCreateForSuiteRunner.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoCreateForSuiteRunner.java
index 92ef4fd80d..7261230a41 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoCreateForSuiteRunner.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoCreateForSuiteRunner.java
@@ -21,6 +21,7 @@ package com.hedera.services.bdd.suites.crypto;
*/
import com.hedera.services.bdd.spec.HapiApiSpec;
+import com.hedera.services.bdd.spec.utilops.LoadTest;
import com.hedera.services.bdd.suites.HapiApiSuite;
import com.hedera.services.bdd.suites.SuiteRunner;
import org.apache.logging.log4j.LogManager;
@@ -29,6 +30,7 @@ import org.junit.Assert;
import java.util.List;
import java.util.Map;
+import java.util.OptionalLong;
import static com.hedera.services.bdd.spec.HapiApiSpec.customHapiSpec;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo;
@@ -37,7 +39,6 @@ import static com.hedera.services.bdd.spec.transactions.TxnUtils.NOISY_RETRY_PRE
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate;
import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext;
-import static com.hedera.services.bdd.spec.utilops.LoadTest.initialBalance;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS;
/**
@@ -49,6 +50,9 @@ public class CryptoCreateForSuiteRunner extends HapiApiSuite {
private String nodes;
private String defaultNode;
+ // Use more initialBalance for this account as it is used as payer for the performance tests
+ private static long initialBalance = 5L * LoadTest.initialBalance.getAsLong();
+
public CryptoCreateForSuiteRunner(String nodes, String defaultNode) {
this.nodes = nodes;
this.defaultNode = defaultNode;
@@ -76,7 +80,7 @@ public class CryptoCreateForSuiteRunner extends HapiApiSuite {
while (!createdAuditablePayer) {
try {
var cryptoCreateOp = cryptoCreate("payerAccount")
- .balance(initialBalance.getAsLong())
+ .balance(initialBalance)
.withRecharging()
.rechargeWindow(3)
.key(GENESIS) | ['test-clients/src/main/java/com/hedera/services/bdd/spec/utilops/LoadTest.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoCreateForSuiteRunner.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 5,786,242 | 1,385,943 | 163,065 | 1,030 | 805 | 167 | 13 | 2 | 515 | 53 | 136 | 6 | 1 | 0 | 1970-01-01T00:26:52 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,468 | hashgraph/hedera-services/1150/1149 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1149 | https://github.com/hashgraph/hedera-services/pull/1150 | https://github.com/hashgraph/hedera-services/pull/1150 | 1 | closes | Fix HTS-Restart-Performance-Hotspot-10k-61m test failure for release/0.12.0 | **Summary of the defect**
This test sometimes fails due to hedera server node enter `MAINTENACE` mode to quick and leave its test-client hanging out with lost connection.
**How to reproduce (if possible)**
Example: https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1614939819024100
**Service logs (if applicable)**
```
---- HAPIClientValidator FAILED validation ----
<<ERROR>>
2021-03-05 10:21:06.291 ERROR 262 HapiTxnOp - 'RunTokenTransfers' - HapiCryptoTransfer{sigs=2, node=0.0.3, transfers=[], tokenTransfers=0.0.2101451([0.0.2101454 -> -1, 0.0.2101460 <- +1])} Wrong actual status UNKNOWN, not one of [OK, DUPLICATE_TRANSACTION, SUCCESS]!
2021-03-05 10:21:06.291 ERROR 262 HapiTxnOp - 'RunTokenTransfers' - HapiCryptoTransfer{sigs=2, node=0.0.3, transfers=[], tokenTransfers=0.0.2101451([0.0.2101454 -> -1, 0.0.2101460 <- +1])} Wrong actual status UNKNOWN, not one of [OK, DUPLICATE_TRANSACTION, SUCCESS]!
2021-03-05 10:21:06.291 ERROR 262 HapiTxnOp - 'RunTokenTransfers' - HapiCryptoTransfer{sigs=2, node=0.0.3, transfers=[], tokenTransfers=0.0.2101451([0.0.2101460 -> -1, 0.0.2101454 <- +1])} Wrong actual status UNKNOWN, not one of [OK, DUPLICATE_TRANSACTION, SUCCESS]!
2021-03-05 10:21:06.291 ERROR 262 HapiTxnOp - 'RunTokenTransfers' - HapiCryptoTransfer{sigs=2, node=0.0.3, transfers=[],
...
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| fe6a3594d3f5a8f0b34c01438df32aabfee487dd | 847098f9a1469cf904e2f8755e048eb189f097e5 | https://github.com/hashgraph/hedera-services/compare/fe6a3594d3f5a8f0b34c01438df32aabfee487dd...847098f9a1469cf904e2f8755e048eb189f097e5 | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/TokenTransfersLoadProvider.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/TokenTransfersLoadProvider.java
index 66c64a18e3..d98804ca41 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/TokenTransfersLoadProvider.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/TokenTransfersLoadProvider.java
@@ -33,6 +33,7 @@ import com.hedera.services.bdd.spec.utilops.CustomSpecAssert;
import com.hedera.services.bdd.suites.HapiApiSuite;
import com.hedera.services.bdd.suites.utils.sysfiles.serdes.FeesJsonToGrpcBytes;
import com.hedera.services.bdd.suites.utils.sysfiles.serdes.SysFileSerde;
+import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -66,6 +67,12 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.freeze;
import static com.hedera.services.bdd.suites.perf.PerfUtilOps.stdMgmtOf;
import static com.hedera.services.bdd.suites.perf.PerfUtilOps.tokenOpsEnablement;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.DUPLICATE_TRANSACTION;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_PAYER_BALANCE;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.PLATFORM_NOT_ACTIVE;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.UNKNOWN;
import static java.util.Map.entry;
import static java.util.concurrent.TimeUnit.MINUTES;
@@ -106,7 +113,9 @@ public class TokenTransfersLoadProvider extends HapiApiSuite {
// end to prevent it from making new export files that may cause account balances validator to
// be inconsistent. The freeze shouldn't cause normal perf test any issue.
freeze().payingWith(GENESIS)
- .startingIn(10).seconds()
+ .startingIn(30).seconds()
+ .hasKnownStatusFrom(SUCCESS,UNKNOWN)
+ .hasAnyPrecheck()
.andLasting(10).minutes(),
sleepFor(60_000)
);
@@ -153,7 +162,8 @@ public class TokenTransfersLoadProvider extends HapiApiSuite {
boolean hasKnownHtsFeeSchedules = false;
SysFileSerde<String> serde = new FeesJsonToGrpcBytes();
while (!hasKnownHtsFeeSchedules) {
- var query = QueryVerbs.getFileContents(FEE_SCHEDULE);
+ var query = QueryVerbs.getFileContents(FEE_SCHEDULE)
+ .fee(10_000_000_000L);
try {
allRunFor(spec, query);
var contents = query.getResponse().getFileGetContents().getFileContents().getContents();
@@ -225,8 +235,9 @@ public class TokenTransfersLoadProvider extends HapiApiSuite {
}
}
op = cryptoTransfer(xfers)
- .hasKnownStatusFrom(NOISY_ALLOWED_STATUSES)
+ .hasKnownStatusFrom(OK, DUPLICATE_TRANSACTION, SUCCESS, UNKNOWN, INSUFFICIENT_PAYER_BALANCE)
.hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
+ .hasPrecheckFrom(OK, PLATFORM_NOT_ACTIVE)
.noLogging()
.deferStatusResolution();
firstDir.set(Boolean.FALSE);
@@ -244,8 +255,9 @@ public class TokenTransfersLoadProvider extends HapiApiSuite {
}
}
op = cryptoTransfer(xfers)
+ .hasKnownStatusFrom(OK, DUPLICATE_TRANSACTION, SUCCESS, UNKNOWN, INSUFFICIENT_PAYER_BALANCE)
.hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
- .hasKnownStatusFrom(NOISY_ALLOWED_STATUSES)
+ .hasPrecheckFrom(OK, PLATFORM_NOT_ACTIVE)
.noLogging()
.deferStatusResolution();
firstDir.set(Boolean.TRUE); | ['test-clients/src/main/java/com/hedera/services/bdd/suites/perf/TokenTransfersLoadProvider.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 5,979,912 | 1,433,627 | 168,662 | 1,051 | 1,261 | 303 | 20 | 1 | 1,601 | 177 | 561 | 26 | 1 | 1 | 1970-01-01T00:26:54 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,459 | hashgraph/hedera-services/1346/1337 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1337 | https://github.com/hashgraph/hedera-services/pull/1346 | https://github.com/hashgraph/hedera-services/pull/1346 | 1 | closes | Account Balances exportPeriod doesn't control export timing | **Summary of the defect**
The export frequency doesn't follow the control of `balances.exportPeriodSecs` property. The example here is the EET client updates this property to be 120 seconds. However, it still exports account balances based on previous value.
From the output.log, we can see this behavior:
```
balances.exportDir.path=/opt/hgcapp/accountBalances/
balances.exportEnabled=true
balances.exportPeriodSecs=600
balances.exportTokenBalances=true
hedera.accountsExportPath=data/onboard/exportedAccount.txt
hedera.exportAccountsOnStartup=false
balances.exportDir.path=data/accountBalances/
balances.exportPeriodSecs=60
balances.exportDir.path=data/accountBalances/
balances.exportPeriodSecs=60
2021-04-28 01:46:39.608 INFO 167 ServicesMain - Accounts exported.
balances.exportDir.path=data/accountBalances/
balances.exportPeriodSecs=120
2021-04-28 01:48:00.816 INFO 142 SignedStateBalancesExporter - Took 95ms to summarize signed state balances
2021-04-28 01:48:00.849 INFO 179 SignedStateBalancesExporter - -> Took 32ms to export and sign proto balances file at 2021-04-28T01:48:00.145707Z
2021-04-28 01:48:00.950 INFO 164 SignedStateBalancesExporter - -> Took 101ms to export and sign CSV balances file at 2021-04-28T01:48:00.145707Z
2021-04-28 01:49:00.972 INFO 142 SignedStateBalancesExporter - Took 338ms to summarize signed state balances
2021-04-28 01:49:01.059 INFO 179 SignedStateBalancesExporter - -> Took 86ms to export and sign proto balances file at 2021-04-28T01:49:00.077185Z
2021-04-28 01:49:01.567 INFO 164 SignedStateBalancesExporter - -> Took 507ms to export and sign CSV balances file at 2021-04-28T01:49:00.077185Z
2021-04-28 01:50:01.057 INFO 142 SignedStateBalancesExporter - Took 597ms to summarize signed state balances
2021-04-28 01:50:01.172 INFO 179 SignedStateBalancesExporter - -> Took 114ms to export and sign proto balances file at 2021-04-28T01:50:00.028142Z
2021-04-28 01:50:01.759 INFO 164 SignedStateBalancesExporter - -> Took 585ms to export and sign CSV balances file at 2021-04-28T01:50:00.028142Z
2021-04-28 01:51:00.877 INFO 142 SignedStateBalancesExporter - Took 334ms to summarize signed state balances
2021-04-28 01:51:00.981 INFO 179 SignedStateBalancesExporter - -> Took 102ms to export and sign proto balances file at 2021-04-28T01:51:00.044377Z
2021-04-28 01:51:01.591 INFO 164 SignedStateBalancesExporter - -> Took 610ms to export and sign CSV balances file at 2021-04-28T01:51:00.044377Z
2021-04-28 01:52:01.168 INFO 142 SignedStateBalancesExporter - Took 497ms to summarize signed state balances
2021-04-28 01:52:01.274 INFO 179 SignedStateBalancesExporter - -> Took 105ms to export and sign proto balances file at 2021-04-28T01:52:00.082098Z
2021-04-28 01:52:02.038 INFO 164 SignedStateBalancesExporter - -> Took 763ms to export and sign CSV balances file at 2021-04-28T01:52:00.082098Z
2021-04-28 01:53:01.213 INFO 142 SignedStateBalancesExporter - Took 668ms to summarize signed state balances
2021-04-28 01:53:01.317 INFO 179 SignedStateBalancesExporter - -> Took 103ms to export and sign proto balances file at 2021-04-28T01:53:00.065053Z
2021-04-28 01:53:01.899 INFO 164 SignedStateBalancesExporter - -> Took 581ms to export and sign CSV balances file at 2021-04-28T01:53:00.065053Z
2021-04-28 01:54:01.250 INFO 142 SignedStateBalancesExporter - Took 628ms to summarize signed state balances
2021-04-28 01:54:01.359 INFO 179 SignedStateBalancesExporter - -> Took 108ms to export and sign proto balances file at 2021-04-28T01:54:00.102884Z
2021-04-28 01:54:01.990 INFO 164 SignedStateBalancesExporter - -> Took 630ms to export and sign CSV balances file at 2021-04-28T01:54:00.102884Z
2021-04-28 01:55:01.283 INFO 142 SignedStateBalancesExporter - Took 641ms to summarize signed state balances
2021-04-28 01:55:01.384 INFO 179 SignedStateBalancesExporter - -> Took 100ms to export and sign proto balances file at 2021-04-28T01:55:00.177061Z
2021-04-28 01:55:01.956 INFO 164 SignedStateBalancesExporter - -> Took 571ms to export and sign CSV balances file at 2021-04-28T01:55:00.177061Z
2021-04-28 01:56:00.832 INFO 142 SignedStateBalancesExporter - Took 353ms to summarize signed state balances
2021-04-28 01:56:00.933 INFO 179 SignedStateBalancesExporter - -> Took 100ms to export and sign proto balances file at 2021-04-28T01:56:00.096215Z
2021-04-28 01:56:01.515 INFO 164 SignedStateBalancesExporter - -> Took 581ms to export and sign CSV balances file at 2021-04-28T01:56:00.096215Z
```
**How to reproduce (if possible)**
Follow the slack report link: https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1619575144055800 into the test results and get into the node0000's output.log and also the exported account balances files to find the details.
**Service logs (if applicable)**
```
...
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| f5aa3475297586c9fb222e378f3f52f37334c685 | 33d28d5a48b79b68b2219fc0e163adb5ca839335 | https://github.com/hashgraph/hedera-services/compare/f5aa3475297586c9fb222e378f3f52f37334c685...33d28d5a48b79b68b2219fc0e163adb5ca839335 | diff --git a/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java b/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
index 7a6ad5e425..d4bf0ea547 100644
--- a/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
+++ b/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
@@ -96,7 +96,6 @@ public class SignedStateBalancesExporter implements BalancesExporter {
private BalancesSummary summary;
Instant periodBegin = NEVER;
- private final int exportPeriod;
static final Comparator<SingleAccountBalances> SINGLE_ACCOUNT_BALANCES_COMPARATOR =
Comparator.comparing(SingleAccountBalances::getAccountID, ACCOUNT_ID_COMPARATOR);
@@ -109,12 +108,13 @@ public class SignedStateBalancesExporter implements BalancesExporter {
this.signer = signer;
this.expectedFloat = properties.getLongProperty("ledger.totalTinyBarFloat");
this.dynamicProperties = dynamicProperties;
- exportPeriod = dynamicProperties.balancesExportPeriodSecs();
}
@Override
public boolean isTimeToExport(Instant now) {
- if ( periodBegin != NEVER && now.getEpochSecond() % exportPeriod <= ALLOWED_EXPORT_TIME_SKEW
+ final int exportPeriod = dynamicProperties.balancesExportPeriodSecs();
+ if ( periodBegin != NEVER
+ && now.getEpochSecond() % exportPeriod <= ALLOWED_EXPORT_TIME_SKEW
&& now.getEpochSecond() / exportPeriod != periodBegin.getEpochSecond() / exportPeriod) {
periodBegin = now;
return true; | ['hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,353,830 | 1,528,288 | 179,491 | 1,126 | 368 | 89 | 6 | 1 | 5,181 | 586 | 1,719 | 66 | 1 | 2 | 1970-01-01T00:26:59 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,452 | hashgraph/hedera-services/3648/3645 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/3645 | https://github.com/hashgraph/hedera-services/pull/3648 | https://github.com/hashgraph/hedera-services/pull/3648 | 1 | closes | IllegalStateException when loading v26 state file | ### Description
Regression migration test failed due to following error
```
2022-07-06 21:42:57.816 25 ERROR EXCEPTION <main> Browser: Saved state not loaded:
com.swirlds.platform.internal.SignedStateLoadingException: Exception while reading signed state!
at com.swirlds.platform.SwirldsPlatform.loadSavedStateFromDisk(SwirldsPlatform.java:608) ~[swirlds-platform-core-0.27.1.jar:?]
at com.swirlds.platform.Browser.createLocalPlatforms(Browser.java:680) ~[swirlds-platform-core-0.27.1.jar:?]
at com.swirlds.platform.Browser.startPlatforms(Browser.java:801) ~[swirlds-platform-core-0.27.1.jar:?]
at com.swirlds.platform.Browser.main(Browser.java:323) ~[swirlds-platform-core-0.27.1.jar:?]
Caused by: java.lang.IllegalStateException: No software version for deserialized state version 20
at com.hedera.services.ServicesState.init(ServicesState.java:226) ~[?:?]
at com.swirlds.platform.SwirldsPlatform.loadSavedStateFromDisk(SwirldsPlatform.java:596) ~[swirlds-platform-core-0.27.1.jar:?]
... 3 more
```
Slack report URL
https://swirldslabs.slack.com/archives/C03ELH5MUHK/p1657144301044709
### Steps to reproduce
Run regression test GCP-Daily-Crypto-Migration-7N-1C at branch 02614-D-migration-test-state-file
### Additional context
_No response_
### Hedera network
mainnet
### Version
latest master branch
### Operating system
_No response_ | f36d5bc3361c883879fec594d71a7fc449b372f4 | 9badfd94d57d21eb5906413ff3ead4ac428f1c6f | https://github.com/hashgraph/hedera-services/compare/f36d5bc3361c883879fec594d71a7fc449b372f4...9badfd94d57d21eb5906413ff3ead4ac428f1c6f | diff --git a/hedera-node/src/main/java/com/hedera/services/ServicesState.java b/hedera-node/src/main/java/com/hedera/services/ServicesState.java
index bc7f64a5e2..63ebc1bac8 100644
--- a/hedera-node/src/main/java/com/hedera/services/ServicesState.java
+++ b/hedera-node/src/main/java/com/hedera/services/ServicesState.java
@@ -91,6 +91,7 @@ import static com.hedera.services.state.migration.StateVersions.CURRENT_VERSION;
import static com.hedera.services.state.migration.StateVersions.FIRST_026X_VERSION;
import static com.hedera.services.state.migration.StateVersions.FIRST_027X_VERSION;
import static com.hedera.services.state.migration.StateVersions.MINIMUM_SUPPORTED_VERSION;
+import static com.hedera.services.state.migration.StateVersions.RELEASE_0270_VERSION;
import static com.hedera.services.state.migration.StateVersions.lastSoftwareVersionOf;
import static com.hedera.services.utils.EntityIdUtils.parseAccount;
import static com.swirlds.common.system.InitTrigger.GENESIS;
@@ -184,9 +185,9 @@ public class ServicesState extends AbstractNaryMerkleInternal implements SwirldS
@Override
public int getMinimumChildCount(int version) {
- if (version >= MINIMUM_SUPPORTED_VERSION && version < CURRENT_VERSION) {
+ if (version >= MINIMUM_SUPPORTED_VERSION && version < RELEASE_0270_VERSION) {
return NUM_POST_0210_CHILDREN;
- } else if (version == CURRENT_VERSION) {
+ } else if (version >= RELEASE_0270_VERSION && version <= CURRENT_VERSION) {
return NUM_POST_0260_CHILDREN;
} else {
throw new IllegalArgumentException("Argument 'version='" + version + "' is invalid!");
diff --git a/hedera-node/src/main/java/com/hedera/services/state/migration/StateVersions.java b/hedera-node/src/main/java/com/hedera/services/state/migration/StateVersions.java
index b847525d93..a8619b25f0 100644
--- a/hedera-node/src/main/java/com/hedera/services/state/migration/StateVersions.java
+++ b/hedera-node/src/main/java/com/hedera/services/state/migration/StateVersions.java
@@ -50,21 +50,23 @@ public final class StateVersions {
public static final int RELEASE_025X_VERSION = 18;
public static final int RELEASE_0260_VERSION = 19;
public static final int RELEASE_0270_VERSION = 20;
+ public static final int RELEASE_0280_VERSION = 21;
- public static final SerializableSemVers FIRST_025X_VERSION = forHapiAndHedera("0.25.1", "0.25.0");
public static final SerializableSemVers LAST_025X_VERSION = forHapiAndHedera("0.25.1", "0.25.4");
public static final SerializableSemVers FIRST_026X_VERSION = forHapiAndHedera("0.26.0", "0.26.0");
public static final SerializableSemVers LAST_026X_VERSION = forHapiAndHedera("0.26.0", "0.26.3");
public static final SerializableSemVers FIRST_027X_VERSION = forHapiAndHedera("0.27.0", "0.27.0");
+ public static final SerializableSemVers LAST_027X_VERSION = forHapiAndHedera("0.27.0", "0.27.3");
public static final int MINIMUM_SUPPORTED_VERSION = RELEASE_025X_VERSION;
- public static final int CURRENT_VERSION = RELEASE_0270_VERSION;
+ public static final int CURRENT_VERSION = RELEASE_0280_VERSION;
@Nullable
public static SerializableSemVers lastSoftwareVersionOf(final int stateVersion) {
return switch (stateVersion) {
case RELEASE_025X_VERSION -> LAST_025X_VERSION;
case RELEASE_0260_VERSION -> LAST_026X_VERSION;
+ case RELEASE_0270_VERSION -> LAST_027X_VERSION;
default -> null;
};
}
diff --git a/hedera-node/src/test/java/com/hedera/services/ServicesStateTest.java b/hedera-node/src/test/java/com/hedera/services/ServicesStateTest.java
index ffb45b671d..dc418bbe39 100644
--- a/hedera-node/src/test/java/com/hedera/services/ServicesStateTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/ServicesStateTest.java
@@ -543,6 +543,9 @@ class ServicesStateTest {
assertEquals(
StateChildIndices.NUM_POST_0210_CHILDREN,
subject.getMinimumChildCount(StateVersions.RELEASE_0260_VERSION));
+ assertEquals(
+ StateChildIndices.NUM_POST_0260_CHILDREN,
+ subject.getMinimumChildCount(StateVersions.RELEASE_0270_VERSION));
assertEquals(
StateChildIndices.NUM_POST_0260_CHILDREN,
subject.getMinimumChildCount(StateVersions.CURRENT_VERSION));
diff --git a/hedera-node/src/test/java/com/hedera/services/state/migration/StateVersionsTest.java b/hedera-node/src/test/java/com/hedera/services/state/migration/StateVersionsTest.java
index ea73110ee2..2236e9cb4f 100644
--- a/hedera-node/src/test/java/com/hedera/services/state/migration/StateVersionsTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/state/migration/StateVersionsTest.java
@@ -24,9 +24,11 @@ import org.junit.jupiter.api.Test;
import static com.hedera.services.state.migration.StateVersions.LAST_025X_VERSION;
import static com.hedera.services.state.migration.StateVersions.LAST_026X_VERSION;
+import static com.hedera.services.state.migration.StateVersions.LAST_027X_VERSION;
import static com.hedera.services.state.migration.StateVersions.RELEASE_025X_VERSION;
import static com.hedera.services.state.migration.StateVersions.RELEASE_0260_VERSION;
import static com.hedera.services.state.migration.StateVersions.RELEASE_0270_VERSION;
+import static com.hedera.services.state.migration.StateVersions.RELEASE_0280_VERSION;
import static com.hedera.services.state.migration.StateVersions.lastSoftwareVersionOf;
import static org.junit.jupiter.api.Assertions.*;
@@ -35,6 +37,7 @@ class StateVersionsTest {
void getsExpectedLastVersionsForSupportedMigrationsOnly() {
assertSame(LAST_025X_VERSION, lastSoftwareVersionOf(RELEASE_025X_VERSION));
assertSame(LAST_026X_VERSION, lastSoftwareVersionOf(RELEASE_0260_VERSION));
- assertNull(lastSoftwareVersionOf(RELEASE_0270_VERSION));
+ assertSame(LAST_027X_VERSION, lastSoftwareVersionOf(RELEASE_0270_VERSION));
+ assertNull(lastSoftwareVersionOf(RELEASE_0280_VERSION));
}
}
\\ No newline at end of file | ['hedera-node/src/test/java/com/hedera/services/state/migration/StateVersionsTest.java', 'hedera-node/src/test/java/com/hedera/services/ServicesStateTest.java', 'hedera-node/src/main/java/com/hedera/services/ServicesState.java', 'hedera-node/src/main/java/com/hedera/services/state/migration/StateVersions.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 9,044,257 | 2,212,175 | 257,511 | 1,558 | 803 | 205 | 11 | 2 | 1,395 | 95 | 388 | 43 | 1 | 1 | 1970-01-01T00:27:37 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,453 | hashgraph/hedera-services/3607/3597 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/3597 | https://github.com/hashgraph/hedera-services/pull/3607 | https://github.com/hashgraph/hedera-services/pull/3607 | 1 | fixes | Smart contract Allowance Solidity Interfaces Incorrectly Implemented | The interfaces as described in [HIP-218](https://hips.hedera.com/hip/hip-218) have only been implemented for HTS redirect, not for direct HTS calls.
We should implement a set of methods against the HTS Precompile address (0x167) that includes the token ID as an argument,
* allowance(address token, address owner, address spender)
* approve(address token, address spender, uint256 amount)
* approveNFT(address token, address to, uint256 tokenId)
* setApprovalForAll(address token, address operator, bool approved)
* isApprovedForAll(address token, address owner, address operator)
* isApproved(address token, uint256 tokenId)
| 233ec8069cc02f003fa8badabaef24899e9e9af5 | 4824a50a27bcbbf785a9c50b5afc3d91a62a44c8 | https://github.com/hashgraph/hedera-services/compare/233ec8069cc02f003fa8badabaef24899e9e9af5...4824a50a27bcbbf785a9c50b5afc3d91a62a44c8 | diff --git a/hedera-node/src/main/java/com/hedera/services/exceptions/InvalidTransactionException.java b/hedera-node/src/main/java/com/hedera/services/exceptions/InvalidTransactionException.java
index 81084c2ab6..d330601b2c 100644
--- a/hedera-node/src/main/java/com/hedera/services/exceptions/InvalidTransactionException.java
+++ b/hedera-node/src/main/java/com/hedera/services/exceptions/InvalidTransactionException.java
@@ -34,27 +34,27 @@ import org.apache.tuweni.bytes.Bytes;
* detail message in the constructor.
*/
public class InvalidTransactionException extends RuntimeException {
- private static final String REVERT_REASON_TAG_START = "{{";
- private static final String REVERT_REASON_TAG_END = "}}";
private final ResponseCodeEnum responseCode;
+ private final boolean reverting;
public InvalidTransactionException(final ResponseCodeEnum responseCode) {
- super(responseCode.name());
- this.responseCode = responseCode;
+ this(responseCode.name(), responseCode, false);
}
-
- public InvalidTransactionException(final String detailMessage, final ResponseCodeEnum responseCode) {
- super(detailMessage);
- this.responseCode = responseCode;
+
+ public InvalidTransactionException(final ResponseCodeEnum responseCode, boolean reverting) {
+ this(responseCode.name(), responseCode, reverting);
}
- public static InvalidTransactionException fromReverting(final ResponseCodeEnum code) {
- return new InvalidTransactionException(revertingDetail(code.name()), code);
+ public InvalidTransactionException(final String detailMessage, final ResponseCodeEnum responseCode) {
+ this(detailMessage, responseCode, false);
}
- public static InvalidTransactionException fromReverting(final ResponseCodeEnum code, final String reason) {
- return new InvalidTransactionException(revertingDetail(reason), code);
+ public InvalidTransactionException(final String detailMessage, final ResponseCodeEnum responseCode,
+ boolean reverting) {
+ super(detailMessage);
+ this.responseCode = responseCode;
+ this.reverting = reverting;
}
public ResponseCodeEnum getResponseCode() {
@@ -62,7 +62,7 @@ public class InvalidTransactionException extends RuntimeException {
}
public boolean isReverting() {
- return getMessage().startsWith(REVERT_REASON_TAG_START);
+ return reverting;
}
public Bytes getRevertReason() {
@@ -70,11 +70,6 @@ public class InvalidTransactionException extends RuntimeException {
throw new IllegalStateException();
}
final var detail = getMessage();
- return Bytes.of(
- detail.substring(REVERT_REASON_TAG_START.length(), detail.indexOf(REVERT_REASON_TAG_END)).getBytes());
- }
-
- private static String revertingDetail(final String revertReason) {
- return REVERT_REASON_TAG_START + revertReason + REVERT_REASON_TAG_END;
+ return Bytes.of(detail.getBytes());
}
}
diff --git a/hedera-node/src/main/java/com/hedera/services/exceptions/ValidationUtils.java b/hedera-node/src/main/java/com/hedera/services/exceptions/ValidationUtils.java
index fbb1128e2b..d3cf2052fd 100644
--- a/hedera-node/src/main/java/com/hedera/services/exceptions/ValidationUtils.java
+++ b/hedera-node/src/main/java/com/hedera/services/exceptions/ValidationUtils.java
@@ -39,7 +39,7 @@ public final class ValidationUtils {
public static void validateTrueOrRevert(final boolean flag, final ResponseCodeEnum code) {
if (!flag) {
- throw InvalidTransactionException.fromReverting(code, code.name());
+ throw new InvalidTransactionException(code, true);
}
}
diff --git a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/AbiConstants.java b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/AbiConstants.java
index 0dab06036b..e3db941d58 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/AbiConstants.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/AbiConstants.java
@@ -58,35 +58,35 @@ public final class AbiConstants {
//redirectForToken(address token, bytes memory data)
public static final int ABI_ID_REDIRECT_FOR_TOKEN = 0x618dc65e;
//name()
- public static final int ABI_ID_NAME = 0x06fdde03;
+ public static final int ABI_ID_ERC_NAME = 0x06fdde03;
//symbol()
- public static final int ABI_ID_SYMBOL = 0x95d89b41;
+ public static final int ABI_ID_ERC_SYMBOL = 0x95d89b41;
//decimals()
- public static final int ABI_ID_DECIMALS = 0x313ce567;
+ public static final int ABI_ID_ERC_DECIMALS = 0x313ce567;
//totalSupply()
- public static final int ABI_ID_TOTAL_SUPPLY_TOKEN = 0x18160ddd;
+ public static final int ABI_ID_ERC_TOTAL_SUPPLY_TOKEN = 0x18160ddd;
//balanceOf(address account)
- public static final int ABI_ID_BALANCE_OF_TOKEN = 0x70a08231;
+ public static final int ABI_ID_ERC_BALANCE_OF_TOKEN = 0x70a08231;
//transfer(address recipient, uint256 amount)
public static final int ABI_ID_ERC_TRANSFER = 0xa9059cbb;
//transferFrom(address sender, address recipient, uint256 amount)
//transferFrom(address from, address to, uint256 tokenId)
public static final int ABI_ID_ERC_TRANSFER_FROM = 0x23b872dd;
- //allowance(address token, address owner, address spender)
- public static final int ABI_ID_ALLOWANCE = 0xdd62ed3e;
- //approve(address token, address spender, uint256 amount)
- //approve(address token, address to, uint256 tokenId)
- public static final int ABI_ID_APPROVE = 0x95ea7b3;
- //setApprovalForAll(address token, address operator, bool approved)
- public static final int ABI_ID_SET_APPROVAL_FOR_ALL = 0xa22cb465;
- //getApproved(address token, uint256 tokenId)
- public static final int ABI_ID_GET_APPROVED = 0x081812fc;
- //isApprovedForAll(address token, address owner, address operator)
- public static final int ABI_ID_IS_APPROVED_FOR_ALL = 0xe985e9c5;
+ //allowance(address owner, address spender)
+ public static final int ABI_ID_ERC_ALLOWANCE = 0xdd62ed3e;
+ //approve(address spender, uint256 amount)
+ //approve(address to, uint256 tokenId)
+ public static final int ABI_ID_ERC_APPROVE = 0x95ea7b3;
+ //setApprovalForAll(address operator, bool approved)
+ public static final int ABI_ID_ERC_SET_APPROVAL_FOR_ALL = 0xa22cb465;
+ //getApproved(uint256 tokenId)
+ public static final int ABI_ID_ERC_GET_APPROVED = 0x081812fc;
+ //isApprovedForAll(address owner, address operator)
+ public static final int ABI_ID_ERC_IS_APPROVED_FOR_ALL = 0xe985e9c5;
//ownerOf(uint256 tokenId)
- public static final int ABI_ID_OWNER_OF_NFT = 0x6352211e;
+ public static final int ABI_ID_ERC_OWNER_OF_NFT = 0x6352211e;
//tokenURI(uint256 tokenId)
- public static final int ABI_ID_TOKEN_URI_NFT = 0xc87b56dd;
+ public static final int ABI_ID_ERC_TOKEN_URI_NFT = 0xc87b56dd;
//Transfer(address indexed from, address indexed to, uint256 indexed tokenId)
//Transfer(address indexed from, address indexed to, uint256 value)
public static final Bytes TRANSFER_EVENT = Bytes.fromHexString(
diff --git a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContract.java b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContract.java
index dc89a11b9b..d2323814fc 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContract.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContract.java
@@ -86,6 +86,7 @@ import javax.inject.Provider;
import javax.inject.Singleton;
import java.util.Collections;
import java.util.Optional;
+import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import static com.hedera.services.exceptions.ValidationUtils.validateTrue;
@@ -108,7 +109,7 @@ public class HTSPrecompiledContract extends AbstractPrecompiledContract {
EntityId.fromGrpcContractId(HTS_PRECOMPILE_MIRROR_ID);
private static final PrecompileContractResult NO_RESULT = new PrecompileContractResult(
- null, true, MessageFrame.State.COMPLETED_FAILED, Optional.empty());
+ null, true, MessageFrame.State.COMPLETED_FAILED, Optional.empty());
private static final Bytes STATIC_CALL_REVERT_REASON = Bytes.of("HTS precompiles are not static".getBytes());
private static final String NOT_SUPPORTED_FUNGIBLE_OPERATION_REASON = "Invalid operation for ERC-20 token!";
@@ -208,7 +209,8 @@ public class HTSPrecompiledContract extends AbstractPrecompiledContract {
gasRequirement = precompile.getGasRequirement(now);
Bytes result = computeInternal(frame);
- return result == null ? PrecompiledContract.PrecompileContractResult.halt((Bytes)null, Optional.of(ExceptionalHaltReason.NONE)) : PrecompiledContract.PrecompileContractResult.success(result);
+ return result == null ? PrecompiledContract.PrecompileContractResult.halt(null,
+ Optional.of(ExceptionalHaltReason.NONE)) : PrecompiledContract.PrecompileContractResult.success(result);
}
void prepareFields(final MessageFrame frame) {
@@ -235,7 +237,7 @@ public class HTSPrecompiledContract extends AbstractPrecompiledContract {
AbiConstants.ABI_ID_TRANSFER_NFTS,
AbiConstants.ABI_ID_TRANSFER_NFT -> new TransferPrecompile(
ledgers, decoder, updater, sigsVerifier, sideEffectsTracker, syntheticTxnFactory,
- infrastructureFactory, precompilePricingUtils, functionId, senderAddress, impliedTransfersMarshal);
+ infrastructureFactory, precompilePricingUtils, functionId, senderAddress, impliedTransfersMarshal);
case AbiConstants.ABI_ID_MINT_TOKEN -> new MintPrecompile(
ledgers, decoder, encoder, updater.aliases(), sigsVerifier, recordsHistorian,
sideEffectsTracker, syntheticTxnFactory, infrastructureFactory, precompilePricingUtils);
@@ -262,104 +264,84 @@ public class HTSPrecompiledContract extends AbstractPrecompiledContract {
final var target = DescriptorUtils.getRedirectTarget(input);
final var tokenId = target.tokenId();
final var isFungibleToken = TokenType.FUNGIBLE_COMMON.equals(ledgers.typeOf(tokenId));
- Precompile nestedPrecompile;
final var nestedFunctionSelector = target.descriptor();
- if (AbiConstants.ABI_ID_NAME == nestedFunctionSelector) {
- nestedPrecompile = new NamePrecompile(
+ yield switch (nestedFunctionSelector) {
+ case AbiConstants.ABI_ID_ERC_NAME -> new NamePrecompile(
tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_SYMBOL == nestedFunctionSelector) {
- nestedPrecompile = new SymbolPrecompile(
+ case AbiConstants.ABI_ID_ERC_SYMBOL -> new SymbolPrecompile(
tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_DECIMALS == nestedFunctionSelector) {
- if (!isFungibleToken) {
- throw new InvalidTransactionException(
- NOT_SUPPORTED_NON_FUNGIBLE_OPERATION_REASON, INVALID_TOKEN_ID);
- }
- nestedPrecompile = new DecimalsPrecompile(
+ case AbiConstants.ABI_ID_ERC_DECIMALS ->
+ checkFungible(isFungibleToken, () -> new DecimalsPrecompile(
+ tokenId, syntheticTxnFactory, ledgers, encoder, decoder,
+ precompilePricingUtils));
+ case AbiConstants.ABI_ID_ERC_TOTAL_SUPPLY_TOKEN -> new TotalSupplyPrecompile(
tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_TOTAL_SUPPLY_TOKEN == nestedFunctionSelector) {
- nestedPrecompile = new TotalSupplyPrecompile(
+ case AbiConstants.ABI_ID_ERC_BALANCE_OF_TOKEN -> new BalanceOfPrecompile(
tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_BALANCE_OF_TOKEN == nestedFunctionSelector) {
- nestedPrecompile = new BalanceOfPrecompile(
- tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_OWNER_OF_NFT == nestedFunctionSelector) {
- if (isFungibleToken) {
- throw new InvalidTransactionException(
- NOT_SUPPORTED_FUNGIBLE_OPERATION_REASON, INVALID_TOKEN_ID);
- }
- nestedPrecompile = new OwnerOfPrecompile(
- tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_TOKEN_URI_NFT == nestedFunctionSelector) {
- if (isFungibleToken) {
- throw new InvalidTransactionException(
- NOT_SUPPORTED_FUNGIBLE_OPERATION_REASON, INVALID_TOKEN_ID);
- }
- nestedPrecompile = new TokenURIPrecompile(
- tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_ERC_TRANSFER == nestedFunctionSelector) {
- if (!isFungibleToken) {
- throw new InvalidTransactionException(
- NOT_SUPPORTED_NON_FUNGIBLE_OPERATION_REASON, INVALID_TOKEN_ID);
- }
- nestedPrecompile = new ERCTransferPrecompile( tokenId, senderAddress, isFungibleToken,
- ledgers, decoder, encoder, updater, sigsVerifier, sideEffectsTracker,
- syntheticTxnFactory, infrastructureFactory, precompilePricingUtils, functionId,
- impliedTransfersMarshal);
- } else if (AbiConstants.ABI_ID_ERC_TRANSFER_FROM == nestedFunctionSelector) {
- if (!dynamicProperties.areAllowancesEnabled()) {
- throw new InvalidTransactionException(NOT_SUPPORTED);
- }
- nestedPrecompile = new ERCTransferPrecompile( tokenId, senderAddress, isFungibleToken,
- ledgers, decoder, encoder, updater, sigsVerifier, sideEffectsTracker,
- syntheticTxnFactory, infrastructureFactory, precompilePricingUtils, functionId,
- impliedTransfersMarshal);
- } else if (AbiConstants.ABI_ID_ALLOWANCE == nestedFunctionSelector) {
- if (!dynamicProperties.areAllowancesEnabled()) {
- throw new InvalidTransactionException(NOT_SUPPORTED);
- }
- nestedPrecompile = new AllowancePrecompile(
- tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_APPROVE == nestedFunctionSelector) {
- if (!dynamicProperties.areAllowancesEnabled()) {
- throw new InvalidTransactionException(NOT_SUPPORTED);
- }
- nestedPrecompile = new ApprovePrecompile(
- tokenId, isFungibleToken, ledgers, decoder, encoder, currentView, sideEffectsTracker,
- syntheticTxnFactory, infrastructureFactory, precompilePricingUtils, senderAddress);
- } else if (AbiConstants.ABI_ID_SET_APPROVAL_FOR_ALL == nestedFunctionSelector) {
- if (!dynamicProperties.areAllowancesEnabled()) {
- throw new InvalidTransactionException(NOT_SUPPORTED);
- }
- nestedPrecompile = new SetApprovalForAllPrecompile(
- tokenId, ledgers, decoder, currentView, sideEffectsTracker, syntheticTxnFactory,
- infrastructureFactory, precompilePricingUtils, senderAddress);
- } else if (AbiConstants.ABI_ID_GET_APPROVED == nestedFunctionSelector) {
- if (!dynamicProperties.areAllowancesEnabled()) {
- throw new InvalidTransactionException(NOT_SUPPORTED);
- }
- nestedPrecompile = new GetApprovedPrecompile(
- tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else if (AbiConstants.ABI_ID_IS_APPROVED_FOR_ALL == nestedFunctionSelector) {
- if (!dynamicProperties.areAllowancesEnabled()) {
- throw new InvalidTransactionException(NOT_SUPPORTED);
- }
- nestedPrecompile = new IsApprovedForAllPrecompile(
- tokenId, syntheticTxnFactory, ledgers, encoder, decoder, precompilePricingUtils);
- } else {
- nestedPrecompile = null;
- }
- yield nestedPrecompile;
+ case AbiConstants.ABI_ID_ERC_OWNER_OF_NFT ->
+ checkNFT(isFungibleToken, () -> new OwnerOfPrecompile(
+ tokenId, syntheticTxnFactory, ledgers, encoder, decoder,
+ precompilePricingUtils));
+ case AbiConstants.ABI_ID_ERC_TOKEN_URI_NFT ->
+ checkNFT(isFungibleToken, () -> new TokenURIPrecompile(
+ tokenId, syntheticTxnFactory, ledgers, encoder, decoder,
+ precompilePricingUtils));
+ case AbiConstants.ABI_ID_ERC_TRANSFER -> checkFungible(isFungibleToken,
+ () -> new ERCTransferPrecompile(tokenId, senderAddress, isFungibleToken,
+ ledgers, decoder, encoder, updater, sigsVerifier, sideEffectsTracker,
+ syntheticTxnFactory, infrastructureFactory, precompilePricingUtils,
+ functionId,
+ impliedTransfersMarshal));
+ case AbiConstants.ABI_ID_ERC_TRANSFER_FROM ->
+ checkFeatureFlag(dynamicProperties.areAllowancesEnabled(),
+ () -> new ERCTransferPrecompile(tokenId, senderAddress, isFungibleToken,
+ ledgers, decoder, encoder, updater, sigsVerifier,
+ sideEffectsTracker,
+ syntheticTxnFactory, infrastructureFactory, precompilePricingUtils,
+ functionId,
+ impliedTransfersMarshal));
+ case AbiConstants.ABI_ID_ERC_ALLOWANCE ->
+ checkFeatureFlag(dynamicProperties.areAllowancesEnabled(),
+ () -> new AllowancePrecompile(
+ tokenId, syntheticTxnFactory, ledgers, encoder, decoder,
+ precompilePricingUtils));
+ case AbiConstants.ABI_ID_ERC_APPROVE ->
+ checkFeatureFlag(dynamicProperties.areAllowancesEnabled(),
+ () -> new ApprovePrecompile(
+ tokenId, isFungibleToken, ledgers, decoder, encoder, currentView,
+ sideEffectsTracker,
+ syntheticTxnFactory, infrastructureFactory, precompilePricingUtils,
+ senderAddress));
+ case AbiConstants.ABI_ID_ERC_SET_APPROVAL_FOR_ALL ->
+ checkFeatureFlag(dynamicProperties.areAllowancesEnabled(),
+ () -> new SetApprovalForAllPrecompile(
+ tokenId, ledgers, decoder, currentView, sideEffectsTracker,
+ syntheticTxnFactory,
+ infrastructureFactory, precompilePricingUtils, senderAddress));
+ case AbiConstants.ABI_ID_ERC_GET_APPROVED ->
+ checkFeatureFlag(dynamicProperties.areAllowancesEnabled(),
+ () -> new GetApprovedPrecompile(
+ tokenId, syntheticTxnFactory, ledgers, encoder, decoder,
+ precompilePricingUtils));
+ case AbiConstants.ABI_ID_ERC_IS_APPROVED_FOR_ALL ->
+ checkFeatureFlag(dynamicProperties.areAllowancesEnabled(),
+ () -> new IsApprovedForAllPrecompile(
+ tokenId, syntheticTxnFactory, ledgers, encoder, decoder,
+ precompilePricingUtils));
+ default -> null;
+ };
}
case AbiConstants.ABI_ID_CREATE_FUNGIBLE_TOKEN,
AbiConstants.ABI_ID_CREATE_FUNGIBLE_TOKEN_WITH_FEES,
AbiConstants.ABI_ID_CREATE_NON_FUNGIBLE_TOKEN,
- AbiConstants.ABI_ID_CREATE_NON_FUNGIBLE_TOKEN_WITH_FEES -> (dynamicProperties.isHTSPrecompileCreateEnabled())
- ? new TokenCreatePrecompile(
+ AbiConstants.ABI_ID_CREATE_NON_FUNGIBLE_TOKEN_WITH_FEES ->
+ (dynamicProperties.isHTSPrecompileCreateEnabled())
+ ? new TokenCreatePrecompile(
ledgers, decoder, encoder, updater, sigsVerifier, recordsHistorian,
- sideEffectsTracker, syntheticTxnFactory, infrastructureFactory, functionId, senderAddress,
- dynamicProperties.fundingAccount(), feeCalculator, precompilePricingUtils)
- : null;
+ sideEffectsTracker, syntheticTxnFactory, infrastructureFactory, functionId,
+ senderAddress,
+ dynamicProperties.fundingAccount(), feeCalculator, precompilePricingUtils)
+ : null;
default -> null;
};
if (precompile != null) {
@@ -378,6 +360,32 @@ public class HTSPrecompiledContract extends AbstractPrecompiledContract {
}
}
+ private Precompile checkNFT(boolean isFungible, Supplier<Precompile> precompileSupplier) {
+ if (isFungible) {
+ throw new InvalidTransactionException(
+ NOT_SUPPORTED_FUNGIBLE_OPERATION_REASON, INVALID_TOKEN_ID);
+ } else {
+ return precompileSupplier.get();
+ }
+ }
+
+ private Precompile checkFungible(boolean isFungible, Supplier<Precompile> precompileSupplier) {
+ if (!isFungible) {
+ throw new InvalidTransactionException(
+ NOT_SUPPORTED_NON_FUNGIBLE_OPERATION_REASON, INVALID_TOKEN_ID);
+ } else {
+ return precompileSupplier.get();
+ }
+ }
+
+ private Precompile checkFeatureFlag(boolean featureFlag, Supplier<Precompile> precompileSupplier) {
+ if (!featureFlag) {
+ throw new InvalidTransactionException(NOT_SUPPORTED);
+ } else {
+ return precompileSupplier.get();
+ }
+ }
+
@SuppressWarnings("rawtypes")
protected Bytes computeInternal(final MessageFrame frame) {
Bytes result;
diff --git a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ApprovePrecompile.java b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ApprovePrecompile.java
index 68ad190f70..daea9ce3cb 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ApprovePrecompile.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ApprovePrecompile.java
@@ -162,7 +162,7 @@ public class ApprovePrecompile extends AbstractWritePrecompile {
transactionBody.getCryptoApproveAllowance().getNftAllowancesList(),
grpcOperatorId);
} catch (InvalidTransactionException e) {
- throw InvalidTransactionException.fromReverting(e.getResponseCode());
+ throw new InvalidTransactionException(e.getResponseCode(), true);
}
}
final var precompileAddress = Address.fromHexString(HTS_PRECOMPILED_CONTRACT_ADDRESS);
diff --git a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ERCTransferPrecompile.java b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ERCTransferPrecompile.java
index 03bc2974b5..ea2f72b373 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ERCTransferPrecompile.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ERCTransferPrecompile.java
@@ -112,7 +112,7 @@ public class ERCTransferPrecompile extends TransferPrecompile {
try {
super.run(frame);
} catch (InvalidTransactionException e) {
- throw InvalidTransactionException.fromReverting(e.getResponseCode());
+ throw new InvalidTransactionException(e.getResponseCode(), true);
}
final var precompileAddress = Address.fromHexString(HTS_PRECOMPILED_CONTRACT_ADDRESS);
diff --git a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutor.java b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutor.java
index 2449999474..09c335660d 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutor.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutor.java
@@ -34,13 +34,13 @@ import static com.hedera.services.exceptions.ValidationUtils.validateFalse;
import static com.hedera.services.exceptions.ValidationUtils.validateTrue;
import static com.hedera.services.state.enums.TokenType.FUNGIBLE_COMMON;
import static com.hedera.services.store.contracts.precompile.utils.DescriptorUtils.getRedirectTarget;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_BALANCE_OF_TOKEN;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_DECIMALS;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_NAME;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_OWNER_OF_NFT;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_SYMBOL;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TOKEN_URI_NFT;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TOTAL_SUPPLY_TOKEN;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_BALANCE_OF_TOKEN;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_DECIMALS;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_NAME;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_OWNER_OF_NFT;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_SYMBOL;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TOKEN_URI_NFT;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TOTAL_SUPPLY_TOKEN;
import static com.hedera.services.utils.MiscUtils.asSecondsTimestamp;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.NOT_SUPPORTED;
@@ -82,31 +82,31 @@ public class RedirectViewExecutor {
final var selector = target.descriptor();
final var isFungibleToken = FUNGIBLE_COMMON.equals(ledgers.typeOf(tokenId));
final Bytes answer;
- if (selector == ABI_ID_NAME) {
+ if (selector == ABI_ID_ERC_NAME) {
final var name = ledgers.nameOf(tokenId);
answer = encoder.encodeName(name);
- } else if (selector == ABI_ID_SYMBOL) {
+ } else if (selector == ABI_ID_ERC_SYMBOL) {
final var symbol = ledgers.symbolOf(tokenId);
answer = encoder.encodeSymbol(symbol);
- } else if (selector == ABI_ID_DECIMALS) {
+ } else if (selector == ABI_ID_ERC_DECIMALS) {
validateTrue(isFungibleToken, INVALID_TOKEN_ID);
final var decimals = ledgers.decimalsOf(tokenId);
answer = encoder.encodeDecimals(decimals);
- } else if (selector == ABI_ID_TOTAL_SUPPLY_TOKEN) {
+ } else if (selector == ABI_ID_ERC_TOTAL_SUPPLY_TOKEN) {
final var totalSupply = ledgers.totalSupplyOf(tokenId);
answer = encoder.encodeTotalSupply(totalSupply);
- } else if (selector == ABI_ID_BALANCE_OF_TOKEN) {
+ } else if (selector == ABI_ID_ERC_BALANCE_OF_TOKEN) {
final var wrapper = decoder.decodeBalanceOf(input.slice(24), updater::unaliased);
final var balance = ledgers.balanceOf(wrapper.accountId(), tokenId);
answer = encoder.encodeBalance(balance);
- } else if (selector == ABI_ID_OWNER_OF_NFT) {
+ } else if (selector == ABI_ID_ERC_OWNER_OF_NFT) {
validateFalse(isFungibleToken, INVALID_TOKEN_ID);
final var wrapper = decoder.decodeOwnerOf(input.slice(24));
final var nftId = NftId.fromGrpc(tokenId, wrapper.serialNo());
final var owner = ledgers.ownerOf(nftId);
final var priorityAddress = ledgers.canonicalAddress(owner);
answer = encoder.encodeOwner(priorityAddress);
- } else if (selector == ABI_ID_TOKEN_URI_NFT) {
+ } else if (selector == ABI_ID_ERC_TOKEN_URI_NFT) {
validateFalse(isFungibleToken, INVALID_TOKEN_ID);
final var wrapper = decoder.decodeTokenUriNFT(input.slice(24));
final var nftId = NftId.fromGrpc(tokenId, wrapper.serialNo());
diff --git a/hedera-node/src/test/java/com/hedera/services/exceptions/InvalidTransactionExceptionTest.java b/hedera-node/src/test/java/com/hedera/services/exceptions/InvalidTransactionExceptionTest.java
index a0ed11ba1b..1743fb8b3b 100644
--- a/hedera-node/src/test/java/com/hedera/services/exceptions/InvalidTransactionExceptionTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/exceptions/InvalidTransactionExceptionTest.java
@@ -35,8 +35,8 @@ class InvalidTransactionExceptionTest {
void canBuildRevertingExceptionWithDetail() {
final var reason = "I don't like it!";
final var frameReason = Bytes.of(reason.getBytes());
- final var revertingEx = InvalidTransactionException.fromReverting(
- INVALID_ALLOWANCE_OWNER_ID, reason);
+ final var revertingEx = new InvalidTransactionException(
+ reason, INVALID_ALLOWANCE_OWNER_ID, true);
assertTrue(revertingEx.isReverting());
assertEquals(frameReason, revertingEx.getRevertReason());
@@ -45,8 +45,8 @@ class InvalidTransactionExceptionTest {
@Test
void canBuildRevertingExceptionNoDetail() {
final var frameReason = Bytes.of(INVALID_ALLOWANCE_OWNER_ID.name().getBytes());
- final var revertingEx = InvalidTransactionException.fromReverting(
- INVALID_ALLOWANCE_OWNER_ID);
+ final var revertingEx = new InvalidTransactionException(
+ INVALID_ALLOWANCE_OWNER_ID, true);
assertTrue(revertingEx.isReverting());
assertEquals(frameReason, revertingEx.getRevertReason());
diff --git a/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC20PrecompilesTest.java b/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC20PrecompilesTest.java
index 9578ef0c75..eec29af49a 100644
--- a/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC20PrecompilesTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC20PrecompilesTest.java
@@ -67,7 +67,6 @@ import com.hedera.services.store.tokens.HederaTokenStore;
import com.hedera.services.txns.crypto.ApproveAllowanceLogic;
import com.hedera.services.txns.crypto.validators.ApproveAllowanceChecks;
import com.hedera.services.txns.crypto.validators.DeleteAllowanceChecks;
-import com.hedera.services.txns.token.validators.CreateChecks;
import com.hedera.services.utils.EntityIdUtils;
import com.hedera.services.utils.EntityNum;
import com.hederahashgraph.api.proto.java.AccountID;
@@ -113,21 +112,21 @@ import java.util.Optional;
import java.util.TreeMap;
import static com.hedera.services.state.EntityCreator.EMPTY_MEMO;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ALLOWANCE;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_APPROVE;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_BALANCE_OF_TOKEN;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_DECIMALS;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_ALLOWANCE;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_APPROVE;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_BALANCE_OF_TOKEN;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_DECIMALS;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TRANSFER;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TRANSFER_FROM;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_GET_APPROVED;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_IS_APPROVED_FOR_ALL;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_NAME;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_OWNER_OF_NFT;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_GET_APPROVED;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_IS_APPROVED_FOR_ALL;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_NAME;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_OWNER_OF_NFT;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_REDIRECT_FOR_TOKEN;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_SET_APPROVAL_FOR_ALL;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_SYMBOL;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TOKEN_URI_NFT;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TOTAL_SUPPLY_TOKEN;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_SET_APPROVAL_FOR_ALL;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_SYMBOL;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TOKEN_URI_NFT;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TOTAL_SUPPLY_TOKEN;
import static com.hedera.services.store.contracts.precompile.HTSPrecompiledContract.HTS_PRECOMPILED_CONTRACT_ADDRESS;
import static com.hedera.services.store.contracts.precompile.HTSTestsUtil.AMOUNT;
import static com.hedera.services.store.contracts.precompile.HTSTestsUtil.NOT_SUPPORTED_FUNGIBLE_OPERATION_REASON;
@@ -286,7 +285,7 @@ class ERC20PrecompilesTest {
Bytes pretendArgumentsApprove = Bytes.concatenate(
Bytes.of(Integers.toBytes(ABI_ID_REDIRECT_FOR_TOKEN)),
fungibleTokenAddr,
- Bytes.of(Integers.toBytes(ABI_ID_APPROVE)));
+ Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE)));
// when:
subject.prepareFields(frame);
@@ -306,7 +305,7 @@ class ERC20PrecompilesTest {
Bytes pretendArgumentsAllowance = Bytes.concatenate(
Bytes.of(Integers.toBytes(ABI_ID_REDIRECT_FOR_TOKEN)),
fungibleTokenAddr,
- Bytes.of(Integers.toBytes(ABI_ID_ALLOWANCE)));
+ Bytes.of(Integers.toBytes(ABI_ID_ERC_ALLOWANCE)));
// when:
@@ -317,7 +316,7 @@ class ERC20PrecompilesTest {
Bytes pretendArgumentsApproveForAll = Bytes.concatenate(
Bytes.of(Integers.toBytes(ABI_ID_REDIRECT_FOR_TOKEN)),
fungibleTokenAddr,
- Bytes.of(Integers.toBytes(ABI_ID_SET_APPROVAL_FOR_ALL)));
+ Bytes.of(Integers.toBytes(ABI_ID_ERC_SET_APPROVAL_FOR_ALL)));
// when:
subject.prepareFields(frame);
@@ -327,7 +326,7 @@ class ERC20PrecompilesTest {
Bytes pretendArgumentsGetApproved = Bytes.concatenate(
Bytes.of(Integers.toBytes(ABI_ID_REDIRECT_FOR_TOKEN)),
fungibleTokenAddr,
- Bytes.of(Integers.toBytes(ABI_ID_GET_APPROVED)));
+ Bytes.of(Integers.toBytes(ABI_ID_ERC_GET_APPROVED)));
// when:
subject.prepareFields(frame);
@@ -337,7 +336,7 @@ class ERC20PrecompilesTest {
Bytes pretendArgumentsApprovedForAll = Bytes.concatenate(
Bytes.of(Integers.toBytes(ABI_ID_REDIRECT_FOR_TOKEN)),
fungibleTokenAddr,
- Bytes.of(Integers.toBytes(ABI_ID_IS_APPROVED_FOR_ALL)));
+ Bytes.of(Integers.toBytes(ABI_ID_ERC_IS_APPROVED_FOR_ALL)));
// when:
@@ -361,7 +360,7 @@ class ERC20PrecompilesTest {
@Test
void gasCalculationForReadOnlyMethod() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_NAME));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_NAME));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -455,7 +454,7 @@ class ERC20PrecompilesTest {
@Test
void name() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_NAME));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_NAME));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -494,7 +493,7 @@ class ERC20PrecompilesTest {
@Test
void symbol() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_SYMBOL));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_SYMBOL));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -528,7 +527,7 @@ class ERC20PrecompilesTest {
@Test
void decimals() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_DECIMALS));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_DECIMALS));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -564,7 +563,7 @@ class ERC20PrecompilesTest {
@Test
void totalSupply() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_TOTAL_SUPPLY_TOKEN));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_TOTAL_SUPPLY_TOKEN));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
given(creator.createSuccessfulSyntheticRecord(Collections.emptyList(), sideEffects, EMPTY_MEMO))
@@ -602,7 +601,7 @@ class ERC20PrecompilesTest {
TreeMap<FcTokenAllowanceId, Long> alowances = new TreeMap<>();
alowances.put(FcTokenAllowanceId.from(EntityNum.fromLong(token.getTokenNum()), EntityNum.fromLong(receiver.getAccountNum())), 10L);
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ALLOWANCE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_ALLOWANCE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.accounts()).willReturn(accounts);
given(dynamicProperties.areAllowancesEnabled()).willReturn(true);
@@ -644,7 +643,7 @@ class ERC20PrecompilesTest {
@Test
void balanceOf() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_BALANCE_OF_TOKEN));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_BALANCE_OF_TOKEN));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -690,7 +689,7 @@ class ERC20PrecompilesTest {
Map<FcTokenAllowanceId, Long> allowances = Map.of(fungibleAllowanceId, 0L);
givenPricingUtilsContext();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(feeCalculator.estimatedGasPriceInTinybars(HederaFunctionality.ContractCall, timestamp))
@@ -729,7 +728,7 @@ class ERC20PrecompilesTest {
List<NftAllowance> nftAllowances = new ArrayList<>();
Map<FcTokenAllowanceId, Long> allowances = Map.of(fungibleAllowanceId, 0L);
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
givenLedgers();
givenPricingUtilsContext();
@@ -952,7 +951,7 @@ class ERC20PrecompilesTest {
@Test
void ownerOfNotSupported() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_OWNER_OF_NFT));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_OWNER_OF_NFT));
Bytes pretendArguments = givenMinimalFrameContextWithoutParentUpdater(nestedPretendArguments);
given(wrappedLedgers.typeOf(token)).willReturn(TokenType.FUNGIBLE_COMMON);
@@ -965,7 +964,7 @@ class ERC20PrecompilesTest {
@Test
void tokenURINotSupported() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_TOKEN_URI_NFT));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_TOKEN_URI_NFT));
Bytes pretendArguments = givenMinimalFrameContextWithoutParentUpdater(nestedPretendArguments);
given(wrappedLedgers.typeOf(token)).willReturn(TokenType.FUNGIBLE_COMMON);
diff --git a/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC721PrecompilesTest.java b/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC721PrecompilesTest.java
index 4774ac771b..066bb23cc6 100644
--- a/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC721PrecompilesTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC721PrecompilesTest.java
@@ -111,20 +111,20 @@ import java.util.Set;
import java.util.TreeSet;
import static com.hedera.services.state.EntityCreator.EMPTY_MEMO;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_APPROVE;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_BALANCE_OF_TOKEN;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_DECIMALS;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_APPROVE;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_BALANCE_OF_TOKEN;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_DECIMALS;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TRANSFER;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TRANSFER_FROM;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_GET_APPROVED;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_IS_APPROVED_FOR_ALL;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_NAME;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_OWNER_OF_NFT;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_GET_APPROVED;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_IS_APPROVED_FOR_ALL;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_NAME;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_OWNER_OF_NFT;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_REDIRECT_FOR_TOKEN;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_SET_APPROVAL_FOR_ALL;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_SYMBOL;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TOKEN_URI_NFT;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TOTAL_SUPPLY_TOKEN;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_SET_APPROVAL_FOR_ALL;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_SYMBOL;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TOKEN_URI_NFT;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TOTAL_SUPPLY_TOKEN;
import static com.hedera.services.store.contracts.precompile.HTSPrecompiledContract.HTS_PRECOMPILED_CONTRACT_ADDRESS;
import static com.hedera.services.store.contracts.precompile.HTSTestsUtil.NOT_SUPPORTED_NON_FUNGIBLE_OPERATION_REASON;
import static com.hedera.services.store.contracts.precompile.HTSTestsUtil.TEST_CONSENSUS_TIME;
@@ -281,7 +281,7 @@ class ERC721PrecompilesTest {
@Test
void name() {
- Bytes pretendArguments = givenMinimalFrameContext(Bytes.of(Integers.toBytes(ABI_ID_NAME)));
+ Bytes pretendArguments = givenMinimalFrameContext(Bytes.of(Integers.toBytes(ABI_ID_ERC_NAME)));
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
given(creator.createSuccessfulSyntheticRecord(Collections.emptyList(), sideEffects, EMPTY_MEMO))
.willReturn(mockRecordBuilder);
@@ -310,7 +310,7 @@ class ERC721PrecompilesTest {
@Test
void symbol() {
- Bytes pretendArguments = givenMinimalFrameContext(Bytes.of(Integers.toBytes(ABI_ID_SYMBOL)));
+ Bytes pretendArguments = givenMinimalFrameContext(Bytes.of(Integers.toBytes(ABI_ID_ERC_SYMBOL)));
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
given(creator.createSuccessfulSyntheticRecord(Collections.emptyList(), sideEffects, EMPTY_MEMO))
.willReturn(mockRecordBuilder);
@@ -344,7 +344,7 @@ class ERC721PrecompilesTest {
EntityNum.fromLong(receiver.getAccountNum()));
allowances.add(fcTokenAllowanceId);
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_IS_APPROVED_FOR_ALL));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_IS_APPROVED_FOR_ALL));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.accounts()).willReturn(accounts);
given(accounts.contains(IS_APPROVE_FOR_ALL_WRAPPER.owner())).willReturn(true);
@@ -383,7 +383,7 @@ class ERC721PrecompilesTest {
@Test
void isApprovedForAllWorksWithOperatorMissing() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_IS_APPROVED_FOR_ALL));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_IS_APPROVED_FOR_ALL));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.accounts()).willReturn(accounts);
given(accounts.contains(IS_APPROVE_FOR_ALL_WRAPPER.owner())).willReturn(true);
@@ -423,7 +423,7 @@ class ERC721PrecompilesTest {
List<CryptoAllowance> cryptoAllowances = new ArrayList<>();
List<TokenAllowance> tokenAllowances = new ArrayList<>();
List<NftAllowance> nftAllowances = new ArrayList<>();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
givenLedgers();
givenPricingUtilsContext();
@@ -486,7 +486,7 @@ class ERC721PrecompilesTest {
List<CryptoAllowance> cryptoAllowances = new ArrayList<>();
List<TokenAllowance> tokenAllowances = new ArrayList<>();
List<NftAllowance> nftAllowances = new ArrayList<>();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.tokens()).willReturn(tokens);
@@ -542,7 +542,7 @@ class ERC721PrecompilesTest {
@Test
void approveSpender0WhenOwner() {
givenPricingUtilsContext();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.tokens()).willReturn(tokens);
@@ -603,7 +603,7 @@ class ERC721PrecompilesTest {
@Test
void approveSpender0WhenGrantedApproveForAll() {
givenPricingUtilsContext();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.tokens()).willReturn(tokens);
@@ -663,7 +663,7 @@ class ERC721PrecompilesTest {
@Test
void approveSpender0NoGoodIfNotPermissioned() {
givenPricingUtilsContext();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(feeCalculator.estimatedGasPriceInTinybars(HederaFunctionality.ContractCall, timestamp))
@@ -701,7 +701,7 @@ class ERC721PrecompilesTest {
@Test
void validatesImpliedNftApprovalDeletion() {
givenPricingUtilsContext();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.tokens()).willReturn(tokens);
given(wrappedLedgers.accounts()).willReturn(accounts);
@@ -758,7 +758,7 @@ class ERC721PrecompilesTest {
List<TokenAllowance> tokenAllowances = new ArrayList<>();
List<NftAllowance> nftAllowances = new ArrayList<>();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_APPROVE));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_APPROVE));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.tokens()).willReturn(tokens);
@@ -811,7 +811,7 @@ class ERC721PrecompilesTest {
List<TokenAllowance> tokenAllowances = new ArrayList<>();
List<NftAllowance> nftAllowances = new ArrayList<>();
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_SET_APPROVAL_FOR_ALL));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_SET_APPROVAL_FOR_ALL));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
givenLedgers();
givenPricingUtilsContext();
@@ -871,7 +871,7 @@ class ERC721PrecompilesTest {
EntityNum.fromLong(receiver.getAccountNum()));
allowances.add(fcTokenAllowanceId);
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_GET_APPROVED));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_GET_APPROVED));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(wrappedLedgers.nfts()).willReturn(nfts);
@@ -906,7 +906,7 @@ class ERC721PrecompilesTest {
@Test
void totalSupply() {
- Bytes pretendArguments = givenMinimalFrameContext(Bytes.of(Integers.toBytes(ABI_ID_TOTAL_SUPPLY_TOKEN)));
+ Bytes pretendArguments = givenMinimalFrameContext(Bytes.of(Integers.toBytes(ABI_ID_ERC_TOTAL_SUPPLY_TOKEN)));
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
given(creator.createSuccessfulSyntheticRecord(Collections.emptyList(), sideEffects, EMPTY_MEMO))
.willReturn(mockRecordBuilder);
@@ -934,7 +934,7 @@ class ERC721PrecompilesTest {
@Test
void balanceOf() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_BALANCE_OF_TOKEN));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_BALANCE_OF_TOKEN));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -967,7 +967,7 @@ class ERC721PrecompilesTest {
@Test
void ownerOfHappyPathWorks() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_OWNER_OF_NFT));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_OWNER_OF_NFT));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -1001,7 +1001,7 @@ class ERC721PrecompilesTest {
@Test
void ownerOfRevertsWithMissingNft() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_OWNER_OF_NFT));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_OWNER_OF_NFT));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -1141,7 +1141,7 @@ class ERC721PrecompilesTest {
@Test
void erc721SystemFailureSurfacesResult() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_OWNER_OF_NFT));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_OWNER_OF_NFT));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -1169,7 +1169,7 @@ class ERC721PrecompilesTest {
@Test
void tokenURI() {
- Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_TOKEN_URI_NFT));
+ Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_TOKEN_URI_NFT));
Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments);
given(syntheticTxnFactory.createTransactionCall(1L, pretendArguments)).willReturn(mockSynthBodyBuilder);
@@ -1211,7 +1211,8 @@ class ERC721PrecompilesTest {
@Test
void decimalsNotSupported() {
- Bytes pretendArguments = givenMinimalFrameContextWithoutParentUpdater(Bytes.of(Integers.toBytes(ABI_ID_DECIMALS)));
+ Bytes pretendArguments = givenMinimalFrameContextWithoutParentUpdater(Bytes.of(Integers.toBytes(
+ ABI_ID_ERC_DECIMALS)));
given(wrappedLedgers.typeOf(token)).willReturn(TokenType.NON_FUNGIBLE_UNIQUE);
subject.prepareFields(frame);
diff --git a/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContractTest.java b/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContractTest.java
index 5f6a3ec820..73c33d4112 100644
--- a/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContractTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContractTest.java
@@ -92,7 +92,7 @@ import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_DISSOCIATE_TOKEN;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_DISSOCIATE_TOKENS;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_MINT_TOKEN;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_NAME;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_NAME;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_REDIRECT_FOR_TOKEN;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TRANSFER_NFT;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TRANSFER_NFTS;
@@ -215,7 +215,7 @@ class HTSPrecompiledContractTest {
void computeCostedWorks() {
given(worldUpdater.trackingLedgers()).willReturn(wrappedLedgers);
given(wrappedLedgers.typeOf(fungible)).willReturn(TokenType.FUNGIBLE_COMMON);
- Bytes input = prerequisites(ABI_ID_NAME);
+ Bytes input = prerequisites(ABI_ID_ERC_NAME);
given(messageFrame.isStatic()).willReturn(true);
given(messageFrame.getWorldUpdater()).willReturn(worldUpdater);
given(worldUpdater.hasMutableLedgers()).willReturn(false);
diff --git a/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutorTest.java b/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutorTest.java
index e42b14d6ea..7205dda549 100644
--- a/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutorTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutorTest.java
@@ -46,14 +46,14 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_BALANCE_OF_TOKEN;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_DECIMALS;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_NAME;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_OWNER_OF_NFT;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_BALANCE_OF_TOKEN;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_DECIMALS;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_NAME;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_OWNER_OF_NFT;
import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_REDIRECT_FOR_TOKEN;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_SYMBOL;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TOKEN_URI_NFT;
-import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_TOTAL_SUPPLY_TOKEN;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_SYMBOL;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TOKEN_URI_NFT;
+import static com.hedera.services.store.contracts.precompile.AbiConstants.ABI_ID_ERC_TOTAL_SUPPLY_TOKEN;
import static com.hedera.services.store.contracts.precompile.proxy.RedirectViewExecutor.MINIMUM_TINYBARS_COST;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
@@ -99,7 +99,7 @@ class RedirectViewExecutorTest {
@Test
void computeCostedNAME() {
- prerequisites(ABI_ID_NAME, fungibleTokenAddress);
+ prerequisites(ABI_ID_ERC_NAME, fungibleTokenAddress);
final var result = "name";
@@ -111,7 +111,7 @@ class RedirectViewExecutorTest {
@Test
void computeCostedSYMBOL() {
- prerequisites(ABI_ID_SYMBOL, fungibleTokenAddress);
+ prerequisites(ABI_ID_ERC_SYMBOL, fungibleTokenAddress);
final var result = "symbol";
@@ -123,7 +123,7 @@ class RedirectViewExecutorTest {
@Test
void computeCostedDECIMALS() {
- prerequisites(ABI_ID_DECIMALS, fungibleTokenAddress);
+ prerequisites(ABI_ID_ERC_DECIMALS, fungibleTokenAddress);
final var result = 1;
@@ -136,7 +136,7 @@ class RedirectViewExecutorTest {
@Test
void computeCostedTOTAL_SUPPY_TOKEN() {
- prerequisites(ABI_ID_TOTAL_SUPPLY_TOKEN, fungibleTokenAddress);
+ prerequisites(ABI_ID_ERC_TOTAL_SUPPLY_TOKEN, fungibleTokenAddress);
final var result = 1L;
@@ -148,7 +148,7 @@ class RedirectViewExecutorTest {
@Test
void computeCostedBALANCE_OF_TOKEN() {
- Bytes nestedInput = prerequisites(ABI_ID_BALANCE_OF_TOKEN, fungibleTokenAddress);
+ Bytes nestedInput = prerequisites(ABI_ID_ERC_BALANCE_OF_TOKEN, fungibleTokenAddress);
final var result = 1L;
@@ -162,7 +162,7 @@ class RedirectViewExecutorTest {
@Test
void computeCostedOWNER_OF_NFT() {
- Bytes nestedInput = prerequisites(ABI_ID_OWNER_OF_NFT, nonfungibleTokenAddress);
+ Bytes nestedInput = prerequisites(ABI_ID_ERC_OWNER_OF_NFT, nonfungibleTokenAddress);
final var result = Address.fromHexString("0x000000000000013");
final var serialNum = 1L;
@@ -178,7 +178,7 @@ class RedirectViewExecutorTest {
@Test
void computeCostedTOKEN_URI_NFT() {
- Bytes nestedInput = prerequisites(ABI_ID_TOKEN_URI_NFT, nonfungibleTokenAddress);
+ Bytes nestedInput = prerequisites(ABI_ID_ERC_TOKEN_URI_NFT, nonfungibleTokenAddress);
final var result = "some metadata";
final var serialNum = 1L;
diff --git a/hedera-node/src/test/java/com/hedera/services/txns/crypto/validators/DeleteAllowanceChecksTest.java b/hedera-node/src/test/java/com/hedera/services/txns/crypto/validators/DeleteAllowanceChecksTest.java
index ed8593942d..86c32c6427 100644
--- a/hedera-node/src/test/java/com/hedera/services/txns/crypto/validators/DeleteAllowanceChecksTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/txns/crypto/validators/DeleteAllowanceChecksTest.java
@@ -183,7 +183,7 @@ class DeleteAllowanceChecksTest {
@Test
void rejectsMissingToken() {
given(tokenStore.loadPossiblyPausedToken(Id.fromGrpcToken(nftToken)))
- .willThrow(InvalidTransactionException.fromReverting(INVALID_TOKEN_ID));
+ .willThrow(new InvalidTransactionException(INVALID_TOKEN_ID, true));
nftAllowances.add(nftAllowance2);
assertEquals(INVALID_TOKEN_ID,
subject.validateNftDeleteAllowances(nftAllowances, payer, accountStore, tokenStore)); | ['hedera-node/src/test/java/com/hedera/services/txns/crypto/validators/DeleteAllowanceChecksTest.java', 'hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContract.java', 'hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ApprovePrecompile.java', 'hedera-node/src/main/java/com/hedera/services/exceptions/ValidationUtils.java', 'hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/AbiConstants.java', 'hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutorTest.java', 'hedera-node/src/main/java/com/hedera/services/exceptions/InvalidTransactionException.java', 'hedera-node/src/test/java/com/hedera/services/exceptions/InvalidTransactionExceptionTest.java', 'hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/HTSPrecompiledContractTest.java', 'hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC20PrecompilesTest.java', 'hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/proxy/RedirectViewExecutor.java', 'hedera-node/src/test/java/com/hedera/services/store/contracts/precompile/ERC721PrecompilesTest.java', 'hedera-node/src/main/java/com/hedera/services/store/contracts/precompile/impl/ERCTransferPrecompile.java'] | {'.java': 13} | 13 | 13 | 0 | 0 | 13 | 8,964,006 | 2,203,431 | 256,298 | 1,553 | 16,943 | 3,946 | 293 | 7 | 638 | 79 | 145 | 10 | 1 | 0 | 1970-01-01T00:27:36 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,454 | hashgraph/hedera-services/3513/3512 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/3512 | https://github.com/hashgraph/hedera-services/pull/3513 | https://github.com/hashgraph/hedera-services/pull/3513 | 1 | fixes | ContractCallLocalQuery does not use sender_id field | ### Description
The `ContractCallLocalQuery` call does not use the sender_id, instead it always uses the context of the transaction caller for the query.
Also, the accounts should be de-aliased.
### Steps to reproduce
Pseudo code...
```
var call = new ContractCallQuery()
.setFunction("theFunction")
.setContractId(theContractId)
.setGas(400000)
.setSenderAccountId(senderAccountId)
.execute(clientWithOperatorNotSenderId);
```
For this the senderAcountId will not be the 'tx.origin' but the operator in the client will be the operator.
### Additional context
_No response_
### Hedera network
mainnet, testnet, previewnet
### Version
0.25, 0.26
### Operating system
_No response_ | b72e968bbcdd18f52823e9543891c6a568fdbe58 | 3fa57097bf7f7fc8779c1332d96f3cc7936aa912 | https://github.com/hashgraph/hedera-services/compare/b72e968bbcdd18f52823e9543891c6a568fdbe58...3fa57097bf7f7fc8779c1332d96f3cc7936aa912 | diff --git a/hedera-node/src/main/java/com/hedera/services/contracts/execution/CallLocalExecutor.java b/hedera-node/src/main/java/com/hedera/services/contracts/execution/CallLocalExecutor.java
index c799d25f74..ee889d1dc0 100644
--- a/hedera-node/src/main/java/com/hedera/services/contracts/execution/CallLocalExecutor.java
+++ b/hedera-node/src/main/java/com/hedera/services/contracts/execution/CallLocalExecutor.java
@@ -26,7 +26,6 @@ import com.hedera.services.ledger.accounts.AliasManager;
import com.hedera.services.store.AccountStore;
import com.hedera.services.store.contracts.EntityAccess;
import com.hedera.services.store.models.Account;
-import com.hedera.services.store.models.Id;
import com.hedera.services.utils.EntityIdUtils;
import com.hedera.services.utils.ResponseCodeUtil;
import com.hedera.services.utils.accessors.SignedTxnAccessor;
@@ -74,7 +73,9 @@ public class CallLocalExecutor {
) {
try {
final var paymentTxn = SignedTxnAccessor.uncheckedFrom(op.getHeader().getPayment()).getTxn();
- final var senderId = Id.fromGrpcAccount(paymentTxn.getTransactionID().getAccountID());
+ final var senderId = EntityIdUtils.unaliased(op.hasSenderId()
+ ? op.getSenderId()
+ : paymentTxn.getTransactionID().getAccountID(), aliasManager).toId();
final var idOrAlias = op.getContractID();
final var contractId = EntityIdUtils.unaliased(idOrAlias, aliasManager).toId();
diff --git a/hedera-node/src/main/java/com/hedera/services/utils/EntityIdUtils.java b/hedera-node/src/main/java/com/hedera/services/utils/EntityIdUtils.java
index 0bb0dac37c..e795c2467d 100644
--- a/hedera-node/src/main/java/com/hedera/services/utils/EntityIdUtils.java
+++ b/hedera-node/src/main/java/com/hedera/services/utils/EntityIdUtils.java
@@ -357,4 +357,29 @@ public final class EntityIdUtils {
return EntityNum.fromContractId(idOrAlias);
}
}
+
+ public static EntityNum unaliased(final AccountID idOrAlias, final AliasManager aliasManager) {
+ return unaliased(idOrAlias, aliasManager, null);
+ }
+
+ public static EntityNum unaliased(
+ final AccountID idOrAlias,
+ final AliasManager aliasManager,
+ @Nullable final Consumer<ByteString> aliasObs
+ ) {
+ if (isAlias(idOrAlias)) {
+ final var alias = idOrAlias.getAlias();
+ final var evmAddress = alias.toByteArray();
+ if (aliasManager.isMirror(evmAddress)) {
+ final var accountNum = Longs.fromByteArray(Arrays.copyOfRange(evmAddress, 12, 20));
+ return EntityNum.fromLong(accountNum);
+ }
+ if (aliasObs != null) {
+ aliasObs.accept(alias);
+ }
+ return aliasManager.lookupIdBy(alias);
+ } else {
+ return EntityNum.fromAccountId(idOrAlias);
+ }
+ }
}
diff --git a/hedera-node/src/test/java/com/hedera/services/contracts/execution/CallLocalExecutorTest.java b/hedera-node/src/test/java/com/hedera/services/contracts/execution/CallLocalExecutorTest.java
index a7f34ea9dd..5ccb96e26a 100644
--- a/hedera-node/src/test/java/com/hedera/services/contracts/execution/CallLocalExecutorTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/contracts/execution/CallLocalExecutorTest.java
@@ -29,6 +29,7 @@ import com.hedera.services.store.contracts.EntityAccess;
import com.hedera.services.store.models.Account;
import com.hedera.services.store.models.Id;
import com.hedera.services.utils.EntityNum;
+import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ContractCallLocalQuery;
import com.hederahashgraph.api.proto.java.ContractCallLocalResponse;
import com.hederahashgraph.api.proto.java.ContractID;
@@ -69,7 +70,8 @@ class CallLocalExecutorTest {
ByteString params = ByteString.copyFrom("Hungry, and...".getBytes());
Id callerID = new Id(0, 0, 123);
Id contractID = new Id(0, 0, 456);
-
+ Id senderID = new Id(0, 0, 789);
+
ContractCallLocalQuery query;
@Mock
@@ -114,6 +116,34 @@ class CallLocalExecutorTest {
assertEquals(expected, result);
}
+ @Test
+ void processingSuccessfulWithAccountAlias() {
+ // setup:
+ final var senderAlias = CommonUtils.unhex("6aea3773ea468a814d954e6dec795bfee7d76e25");
+ final var sender = AccountID.newBuilder()
+ .setAlias(ByteString.copyFrom(senderAlias))
+ .build();
+ query = localCallQuery(contractID.asGrpcContract(), sender, ANSWER_ONLY);
+ given(aliasManager.lookupIdBy(sender.getAlias())).willReturn(EntityNum.fromLong(senderID.num()));
+
+ final var transactionProcessingResult = TransactionProcessingResult
+ .successful(new ArrayList<>(), 0, 0, 1, Bytes.EMPTY,
+ callerID.asEvmAddress(), new TreeMap<>());
+ final var expected = response(OK,transactionProcessingResult);
+
+ given(accountStore.loadAccount(any())).willReturn(new Account(callerID));
+ given(accountStore.loadContract(contractID)).willReturn(new Account(contractID));
+ given(evmTxProcessor.execute(any(), any(), anyLong(), anyLong(), any(), any()))
+ .willReturn(transactionProcessingResult);
+
+ // when:
+ final var result =
+ CallLocalExecutor.execute(accountStore, evmTxProcessor, query, aliasManager, entityAccess);
+
+ // then:
+ assertEquals(expected, result);
+ }
+
@Test
void processingSuccessful() {
// setup:
@@ -256,4 +286,16 @@ class CallLocalExecutorTest {
.build())
.build();
}
+
+ private ContractCallLocalQuery localCallQuery(ContractID id, AccountID sender, ResponseType type) {
+ return ContractCallLocalQuery.newBuilder()
+ .setContractID(id)
+ .setGas(gas)
+ .setFunctionParameters(params)
+ .setHeader(QueryHeader.newBuilder()
+ .setResponseType(type)
+ .build())
+ .setSenderId(sender)
+ .build();
+ }
}
diff --git a/hedera-node/src/test/java/com/hedera/services/fees/calculation/contract/queries/ContractCallLocalResourceUsageTest.java b/hedera-node/src/test/java/com/hedera/services/fees/calculation/contract/queries/ContractCallLocalResourceUsageTest.java
index b3c25ffa0b..dba7537c68 100644
--- a/hedera-node/src/test/java/com/hedera/services/fees/calculation/contract/queries/ContractCallLocalResourceUsageTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/fees/calculation/contract/queries/ContractCallLocalResourceUsageTest.java
@@ -40,6 +40,7 @@ import com.hedera.test.extensions.LogCaptor;
import com.hedera.test.extensions.LogCaptureExtension;
import com.hedera.test.extensions.LoggingSubject;
import com.hedera.test.extensions.LoggingTarget;
+import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ContractCallLocalQuery;
import com.hederahashgraph.api.proto.java.ContractCallLocalResponse;
import com.hederahashgraph.api.proto.java.ContractID;
diff --git a/hedera-node/src/test/java/com/hedera/services/utils/EntityIdUtilsTest.java b/hedera-node/src/test/java/com/hedera/services/utils/EntityIdUtilsTest.java
index 7d1e7682c6..2886b89c34 100644
--- a/hedera-node/src/test/java/com/hedera/services/utils/EntityIdUtilsTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/utils/EntityIdUtilsTest.java
@@ -45,6 +45,7 @@ import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.Arrays;
+import java.util.concurrent.atomic.AtomicReference;
import static com.hedera.services.utils.EntityIdUtils.asEvmAddress;
import static com.hedera.services.utils.EntityIdUtils.asLiteralString;
@@ -96,6 +97,47 @@ class EntityIdUtilsTest {
assertEquals(extantNum, unaliased(input, aliasManager));
}
+ @Test
+ void echoesUnaliasedAccountId() {
+ final var literalId = AccountID.newBuilder().setAccountNum(1234).build();
+
+ assertEquals(EntityNum.fromLong(1234), unaliased(literalId, aliasManager));
+ assertEquals(EntityNum.MISSING_NUM, unaliased(AccountID.getDefaultInstance(), aliasManager));
+ }
+
+ @Test
+ void useAliasDirectlyIfMirror() {
+ final byte[] mockAddr = unhex("0000000000000000000000009abcdefabcdefbbb");
+ final var num = Longs.fromByteArray(Arrays.copyOfRange(mockAddr, 12, 20));
+ final var expectedId = EntityNum.fromLong(num);
+ final var input = AccountID.newBuilder().setAlias(ByteString.copyFrom(mockAddr)).build();
+
+ given(aliasManager.isMirror(mockAddr)).willReturn(true);
+ assertEquals(expectedId, unaliased(input, aliasManager));
+ }
+
+ @Test
+ void returnsResolvedAccountIdIfNonMirro() {
+ final byte[] mockAddr = unhex("aaaaaaaaaaaaaaaaaaaaaaaa9abcdefabcdefbbb");
+ final var extantNum = EntityNum.fromLong(1_234_567L);
+ final var input = AccountID.newBuilder().setAlias(ByteString.copyFrom(mockAddr)).build();
+ given(aliasManager.lookupIdBy(ByteString.copyFrom(mockAddr))).willReturn(extantNum);
+
+ assertEquals(extantNum, unaliased(input, aliasManager));
+ }
+
+ @Test
+ void observesUnalising() {
+ final byte[] mockAddr = unhex("aaaaaaaaaaaaaaaaaaaaaaaa9abcdefabcdefbbb");
+ final var extantNum = EntityNum.fromLong(1_234_567L);
+ final var input = AccountID.newBuilder().setAlias(ByteString.copyFrom(mockAddr)).build();
+ given(aliasManager.lookupIdBy(ByteString.copyFrom(mockAddr))).willReturn(extantNum);
+
+ AtomicReference<ByteString> observer = new AtomicReference<>();
+ unaliased(input, aliasManager, observer::set);
+ assertEquals(ByteString.copyFrom(mockAddr), observer.get());
+ }
+
@Test
void correctLiteral() {
assertEquals("1.2.3", asLiteralString(asAccount("1.2.3"))); | ['hedera-node/src/test/java/com/hedera/services/utils/EntityIdUtilsTest.java', 'hedera-node/src/test/java/com/hedera/services/contracts/execution/CallLocalExecutorTest.java', 'hedera-node/src/test/java/com/hedera/services/fees/calculation/contract/queries/ContractCallLocalResourceUsageTest.java', 'hedera-node/src/main/java/com/hedera/services/contracts/execution/CallLocalExecutor.java', 'hedera-node/src/main/java/com/hedera/services/utils/EntityIdUtils.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 8,629,550 | 2,122,457 | 247,309 | 1,494 | 1,099 | 276 | 30 | 2 | 722 | 86 | 172 | 34 | 0 | 1 | 1970-01-01T00:27:34 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,455 | hashgraph/hedera-services/3136/3134 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/3134 | https://github.com/hashgraph/hedera-services/pull/3136 | https://github.com/hashgraph/hedera-services/pull/3136 | 1 | fixes | ApproveForAll in Approve/Adjust TransitionLogic doesn't check null value | ### Description
Since the protobufs used for `NftAllowance` has `BoolValue` and not `boolean` , we need to check if the value is set or not before checking the value using `allowance.hasApproveForAll`
.
If this is not checked, evn if the value is not set it is considered as false and in `CryptoAdjustAllowanceTransitionLogic` we will remove the entity from the `approveForAllSet`
### Version
v0.25.0-alpha.1
### Operating system
_No response_ | 29e06ef492f0a4ee06f61a38cadcc6d4a0cbf031 | a135bc4360df4bb2e9fffaad5098e9e1a933a096 | https://github.com/hashgraph/hedera-services/compare/29e06ef492f0a4ee06f61a38cadcc6d4a0cbf031...a135bc4360df4bb2e9fffaad5098e9e1a933a096 | diff --git a/hedera-node/src/main/java/com/hedera/services/store/AccountStore.java b/hedera-node/src/main/java/com/hedera/services/store/AccountStore.java
index 3dca51dd86..d2dcadf300 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/AccountStore.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/AccountStore.java
@@ -198,7 +198,7 @@ public class AccountStore {
mutableAccount.setSmartContract(model.isSmartContract());
mutableAccount.setCryptoAllowances(model.getMutableCryptoAllowances());
mutableAccount.setFungibleTokenAllowances(model.getMutableFungibleTokenAllowances());
- mutableAccount.setApproveForAllNfts(model.getMutableApprovedForAllNftsAllowances());
+ mutableAccount.setApproveForAllNfts(model.getMutableApprovedForAllNfts());
final var tokenAssociationMetadata = new TokenAssociationMetadata(
model.getNumAssociations(), model.getNumZeroBalances(), model.getLastAssociatedToken());
mutableAccount.setTokenAssociationMetadata(tokenAssociationMetadata);
diff --git a/hedera-node/src/main/java/com/hedera/services/store/models/Account.java b/hedera-node/src/main/java/com/hedera/services/store/models/Account.java
index 259872ec5c..7e1ef3ec7a 100644
--- a/hedera-node/src/main/java/com/hedera/services/store/models/Account.java
+++ b/hedera-node/src/main/java/com/hedera/services/store/models/Account.java
@@ -489,7 +489,7 @@ public class Account {
return approveForAllNfts == null ? Collections.emptySet() : approveForAllNfts;
}
- public SortedSet<FcTokenAllowanceId> getMutableApprovedForAllNftsAllowances() {
+ public SortedSet<FcTokenAllowanceId> getMutableApprovedForAllNfts() {
if (approveForAllNfts == null) {
approveForAllNfts = new TreeSet<>();
}
diff --git a/hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogic.java b/hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogic.java
index ad79bb6464..333955944c 100644
--- a/hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogic.java
+++ b/hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogic.java
@@ -197,7 +197,7 @@ public class CryptoAdjustAllowanceTransitionLogic implements TransitionLogic {
* @param payerAccount
* account of the payer for this adjustAllowance txn
*/
- private void adjustNftAllowances(final List<NftAllowance> nftAllowances, final Account payerAccount) {
+ void adjustNftAllowances(final List<NftAllowance> nftAllowances, final Account payerAccount) {
if (nftAllowances.isEmpty()) {
return;
}
@@ -206,27 +206,25 @@ public class CryptoAdjustAllowanceTransitionLogic implements TransitionLogic {
final var owner = allowance.getOwner();
final var accountToAdjust = fetchOwnerAccount(owner, payerAccount, accountStore, entitiesChanged);
- final var mutableApprovedForAllNftsAllowances = accountToAdjust.getMutableApprovedForAllNftsAllowances();
-
- final var spenderAccount = allowance.getSpender();
- final var approvedForAll = allowance.getApprovedForAll();
- final var serialNums = allowance.getSerialNumbersList();
- final var tokenID = allowance.getTokenId();
- final var tokenId = Id.fromGrpcToken(tokenID);
- final var spender = Id.fromGrpcAccount(spenderAccount);
+ final var mutableApprovedForAllNfts = accountToAdjust.getMutableApprovedForAllNfts();
+
+ final var tokenId = Id.fromGrpcToken(allowance.getTokenId());
+ final var spender = Id.fromGrpcAccount(allowance.getSpender());
+
accountStore.loadAccountOrFailWith(spender, INVALID_ALLOWANCE_SPENDER_ID);
- final var key = FcTokenAllowanceId.from(tokenId.asEntityNum(),
- spender.asEntityNum());
+ final var key = FcTokenAllowanceId.from(tokenId.asEntityNum(), spender.asEntityNum());
- if (approvedForAll.getValue()) {
- mutableApprovedForAllNftsAllowances.add(key);
- } else {
- mutableApprovedForAllNftsAllowances.remove(key);
+ if (allowance.hasApprovedForAll()) {
+ if (allowance.getApprovedForAll().getValue()) {
+ mutableApprovedForAllNfts.add(key);
+ } else {
+ mutableApprovedForAllNfts.remove(key);
+ }
}
validateAllowanceLimitsOn(accountToAdjust, dynamicProperties.maxAllowanceLimitPerAccount());
- final var nfts = updateSpender(tokenStore, accountToAdjust.getId(), spender, tokenId, serialNums);
+ final var nfts = updateSpender(tokenStore, accountToAdjust.getId(), spender, tokenId, allowance.getSerialNumbersList());
for (var nft : nfts) {
nftsTouched.put(nft.getNftId(), nft);
}
diff --git a/hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogic.java b/hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogic.java
index e21d3f8e90..2d4387349d 100644
--- a/hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogic.java
+++ b/hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogic.java
@@ -184,30 +184,28 @@ public class CryptoApproveAllowanceTransitionLogic implements TransitionLogic {
* @param nftAllowances
* @param payerAccount
*/
- private void applyNftAllowances(final List<NftAllowance> nftAllowances, final Account payerAccount) {
+ void applyNftAllowances(final List<NftAllowance> nftAllowances, final Account payerAccount) {
if (nftAllowances.isEmpty()) {
return;
}
for (var allowance : nftAllowances) {
final var owner = allowance.getOwner();
final var accountToApprove = fetchOwnerAccount(owner, payerAccount, accountStore, entitiesChanged);
- final var approveForAllNftsSet = accountToApprove.getMutableApprovedForAllNftsAllowances();
+ final var approveForAllNftsSet = accountToApprove.getMutableApprovedForAllNfts();
final var spenderId = Id.fromGrpcAccount(allowance.getSpender());
accountStore.loadAccountOrFailWith(spenderId, INVALID_ALLOWANCE_SPENDER_ID);
- final var approvedForAll = allowance.getApprovedForAll();
- final var serialNums = allowance.getSerialNumbersList();
final var tokenId = Id.fromGrpcToken(allowance.getTokenId());
- if (approvedForAll.getValue()) {
+ if (allowance.hasApprovedForAll() && allowance.getApprovedForAll().getValue()) {
final var key = FcTokenAllowanceId.from(tokenId.asEntityNum(), spenderId.asEntityNum());
approveForAllNftsSet.add(key);
}
validateAllowanceLimitsOn(accountToApprove, dynamicProperties.maxAllowanceLimitPerAccount());
- final var nfts = updateSpender(tokenStore, accountToApprove.getId(), spenderId, tokenId, serialNums);
+ final var nfts = updateSpender(tokenStore, accountToApprove.getId(), spenderId, tokenId, allowance.getSerialNumbersList());
for (var nft : nfts) {
nftsTouched.put(nft.getNftId(), nft);
}
diff --git a/hedera-node/src/test/java/com/hedera/services/store/models/AccountTest.java b/hedera-node/src/test/java/com/hedera/services/store/models/AccountTest.java
index 7e8df0b0db..4b2014cba3 100644
--- a/hedera-node/src/test/java/com/hedera/services/store/models/AccountTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/store/models/AccountTest.java
@@ -161,7 +161,7 @@ class AccountTest {
assertTrue(subject.getMutableCryptoAllowances().isEmpty());
assertTrue(subject.getMutableFungibleTokenAllowances().isEmpty());
- assertTrue(subject.getMutableApprovedForAllNftsAllowances().isEmpty());
+ assertTrue(subject.getMutableApprovedForAllNfts().isEmpty());
}
@Test
diff --git a/hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogicTest.java b/hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogicTest.java
index b74ca1a2ef..9af4e2ab78 100644
--- a/hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogicTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogicTest.java
@@ -64,6 +64,7 @@ import java.util.TreeSet;
import static com.hedera.test.utils.IdUtils.asAccount;
import static com.hedera.test.utils.IdUtils.asToken;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ALLOWANCE_SPENDER_ID;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MAX_ALLOWANCES_EXCEEDED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -295,6 +296,53 @@ class CryptoAdjustAllowanceTransitionLogicTest {
verify(txnCtx).setStatus(ResponseCodeEnum.SUCCESS);
}
+ @Test
+ void checkIfApproveForAllIsSetBeforeDeleting(){
+ var ownerAcccount = new Account(ownerId);
+ setUpOwnerWithSomeKeys(ownerAcccount);
+ nftAllowances.clear();
+ var spenderAccount = new Account(Id.fromGrpcAccount(spender1));
+
+ given(accountStore.loadAccountOrFailWith(Id.fromGrpcAccount(spender1), INVALID_ALLOWANCE_SPENDER_ID)).willReturn(spenderAccount);
+ ownerAcccount.setCryptoAllowances(new TreeMap<>());
+ ownerAcccount.setFungibleTokenAllowances(new TreeMap<>());
+ ownerAcccount.setApproveForAllNfts(new TreeSet<>());
+ given(dynamicProperties.maxAllowanceLimitPerAccount()).willReturn(100);
+ given(tokenStore.loadUniqueToken(tokenId2, serial1)).willReturn(nft1);
+ given(tokenStore.loadUniqueToken(tokenId2, serial2)).willReturn(nft2);
+ given(tokenStore.loadUniqueToken(tokenId1, serial1)).willReturn(nft1);
+ given(tokenStore.loadUniqueToken(tokenId1, serial2)).willReturn(nft2);
+ given(tokenStore.loadUniqueToken(tokenId1, serial3)).willReturn(nft3);
+ given(tokenStore.loadUniqueToken(tokenId2, serial1)).willReturn(nft4);
+ given(tokenStore.loadUniqueToken(tokenId2, serial2)).willReturn(nft5);
+
+ givenValidTxnCtx();
+
+ // ApproveForALl is not set. It should not be considered as false
+ final NftAllowance nftAllowance = NftAllowance.newBuilder()
+ .setSpender(spender1)
+ .setOwner(owner)
+ .setTokenId(token2)
+ .addAllSerialNumbers(List.of(serial1)).build();
+ nftAllowances.add(nftAllowance);
+
+ subject.adjustNftAllowances(nftAllowances, ownerAcccount);
+
+ assertEquals(1, ownerAcccount.getApprovedForAllNftsAllowances().size());
+
+ final NftAllowance nftAllowance1 = NftAllowance.newBuilder()
+ .setSpender(spender1)
+ .setOwner(owner)
+ .setTokenId(token2)
+ .setApprovedForAll(BoolValue.of(false))
+ .addAllSerialNumbers(List.of(serial1)).build();
+ nftAllowances.add(nftAllowance1);
+
+ subject.adjustNftAllowances(nftAllowances, ownerAcccount);
+
+ assertEquals(0, ownerAcccount.getApprovedForAllNftsAllowances().size());
+ }
+
private void setUpOwnerWithSomeKeys(final Account ownerAcccount) {
Map<EntityNum, Long> cryptoAllowances = new TreeMap<>();
Map<FcTokenAllowanceId, Long> tokenAllowances = new TreeMap<>();
diff --git a/hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogicTest.java b/hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogicTest.java
index 0ea40c3099..dc288e2425 100644
--- a/hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogicTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogicTest.java
@@ -64,6 +64,7 @@ import java.util.TreeSet;
import static com.hedera.test.utils.IdUtils.asAccount;
import static com.hedera.test.utils.IdUtils.asToken;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ALLOWANCE_OWNER_ID;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ALLOWANCE_SPENDER_ID;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MAX_ALLOWANCES_EXCEEDED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -297,6 +298,40 @@ class CryptoApproveAllowanceTransitionLogicTest {
verify(txnCtx).setStatus(ResponseCodeEnum.SUCCESS);
}
+ @Test
+ void checkIfApproveForAllIsSet(){
+ final NftAllowance nftAllowance = NftAllowance.newBuilder()
+ .setSpender(spender1)
+ .setOwner(ownerId)
+ .setTokenId(token2)
+ .addAllSerialNumbers(List.of(serial1)).build();
+ final NftAllowance nftAllowance1 = NftAllowance.newBuilder()
+ .setSpender(spender1)
+ .setOwner(ownerId)
+ .setTokenId(token2)
+ .setApprovedForAll(BoolValue.of(false))
+ .addAllSerialNumbers(List.of(serial1)).build();
+ nftAllowances.add(nftAllowance);
+ nftAllowances.add(nftAllowance1);
+
+ var ownerAcccount = new Account(Id.fromGrpcAccount(ownerId));
+
+ givenValidTxnCtx();
+
+ given(accountStore.loadAccountOrFailWith(spenderId1, INVALID_ALLOWANCE_SPENDER_ID))
+ .willReturn(payerAcccount);
+ ownerAcccount.setCryptoAllowances(new TreeMap<>());
+ ownerAcccount.setFungibleTokenAllowances(new TreeMap<>());
+ ownerAcccount.setApproveForAllNfts(new TreeSet<>());
+ given(dynamicProperties.maxAllowanceLimitPerAccount()).willReturn(100);
+ given(tokenStore.loadUniqueToken(tokenId2, serial1)).willReturn(nft1);
+ given(tokenStore.loadUniqueToken(tokenId2, serial2)).willReturn(nft2);
+
+ subject.applyNftAllowances(nftAllowances, ownerAcccount);
+
+ assertEquals(1, ownerAcccount.getApprovedForAllNftsAllowances().size());
+ }
+
private void setUpOwnerWithExistingKeys(final Account ownerAcccount) {
Map<EntityNum, Long> cryptoAllowances = new TreeMap<>();
Map<FcTokenAllowanceId, Long> tokenAllowances = new TreeMap<>(); | ['hedera-node/src/main/java/com/hedera/services/store/AccountStore.java', 'hedera-node/src/test/java/com/hedera/services/store/models/AccountTest.java', 'hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogic.java', 'hedera-node/src/main/java/com/hedera/services/store/models/Account.java', 'hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogicTest.java', 'hedera-node/src/main/java/com/hedera/services/txns/crypto/CryptoAdjustAllowanceTransitionLogic.java', 'hedera-node/src/test/java/com/hedera/services/txns/crypto/CryptoApproveAllowanceTransitionLogicTest.java'] | {'.java': 7} | 7 | 7 | 0 | 0 | 7 | 8,212,016 | 2,003,198 | 231,236 | 1,458 | 2,829 | 685 | 44 | 4 | 462 | 68 | 115 | 14 | 0 | 0 | 1970-01-01T00:27:29 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,456 | hashgraph/hedera-services/1417/1404 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1404 | https://github.com/hashgraph/hedera-services/pull/1417 | https://github.com/hashgraph/hedera-services/pull/1417 | 1 | closes | ExpiryManager needs access to mutable schedules FCM to rebuild on reconnect | **Summary of the defect**
If Services restarts from a saved state, the `ExpiryManager` is currently injected with the **object reference** of the `schedules` FCM from this state. If the restarted node is behind, and must reconnect, this results in the schedule entity expiration queue being rebuilt from the wrong FCM.
**Suggested fix**
Inject a `Supplier` providing the mutable `schedules` FCM. | 2e48ee116fc278132e89ddafaa8a9130ba43930d | 6667c1f0f8979668dcce017c75db3a0756e76fbd | https://github.com/hashgraph/hedera-services/compare/2e48ee116fc278132e89ddafaa8a9130ba43930d...6667c1f0f8979668dcce017c75db3a0756e76fbd | diff --git a/hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java b/hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java
index 11a8d42812..50d856e7e8 100644
--- a/hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java
+++ b/hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java
@@ -1502,7 +1502,7 @@ public class ServicesContext {
public ExpiryManager expiries() {
if (expiries == null) {
var histories = txnHistories();
- expiries = new ExpiryManager(recordCache(), histories, scheduleStore(), schedules());
+ expiries = new ExpiryManager(recordCache(), histories, scheduleStore(), this::schedules);
}
return expiries;
}
diff --git a/hedera-node/src/main/java/com/hedera/services/state/expiry/ExpiryManager.java b/hedera-node/src/main/java/com/hedera/services/state/expiry/ExpiryManager.java
index 31e031ca01..b29157ebc3 100644
--- a/hedera-node/src/main/java/com/hedera/services/state/expiry/ExpiryManager.java
+++ b/hedera-node/src/main/java/com/hedera/services/state/expiry/ExpiryManager.java
@@ -41,11 +41,12 @@ import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
+import java.util.function.Supplier;
public class ExpiryManager {
private final RecordCache recordCache;
private final Map<TransactionID, TxnIdRecentHistory> txnHistories;
- private final FCMap<MerkleEntityId, MerkleSchedule> schedules;
+ private final Supplier<FCMap<MerkleEntityId, MerkleSchedule>> schedules;
private final ScheduleStore scheduleStore;
@@ -57,7 +58,7 @@ public class ExpiryManager {
RecordCache recordCache,
Map<TransactionID, TxnIdRecentHistory> txnHistories,
ScheduleStore scheduleStore,
- FCMap<MerkleEntityId, MerkleSchedule> schedules
+ Supplier<FCMap<MerkleEntityId, MerkleSchedule>> schedules
) {
this.recordCache = recordCache;
this.txnHistories = txnHistories;
@@ -94,7 +95,7 @@ public class ExpiryManager {
entityExpiries.reset();
var expiries = new ArrayList<Map.Entry<Pair<Long, Consumer<EntityId>>, Long>>();
- schedules.forEach((id, schedule) -> {
+ schedules.get().forEach((id, schedule) -> {
Consumer<EntityId> consumer = scheduleStore::expire;
var pair = Pair.of(id.getNum(), consumer);
expiries.add(new AbstractMap.SimpleImmutableEntry<>(pair, schedule.expiry()));
diff --git a/hedera-node/src/test/java/com/hedera/services/state/expiry/ExpiryManagerTest.java b/hedera-node/src/test/java/com/hedera/services/state/expiry/ExpiryManagerTest.java
index 4a04cc55dd..d576d7dd40 100644
--- a/hedera-node/src/test/java/com/hedera/services/state/expiry/ExpiryManagerTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/state/expiry/ExpiryManagerTest.java
@@ -102,7 +102,7 @@ class ExpiryManagerTest {
given(expiringEntity.getKey()).willReturn(schedule.getScheduleNum());
given(expiringEntity.getValue()).willReturn(entityIdConsumer);
- subject = new ExpiryManager(recordCache, txnHistories, scheduleStore, schedules);
+ subject = new ExpiryManager(recordCache, txnHistories, scheduleStore, () -> schedules);
}
@Test
@@ -155,7 +155,7 @@ class ExpiryManagerTest {
txnHistories = mock(Map.class);
// given:
- subject = new ExpiryManager(recordCache, txnHistories, scheduleStore, schedules);
+ subject = new ExpiryManager(recordCache, txnHistories, scheduleStore, () -> schedules);
// and:
subject.trackRecord(payer, oldExpiry);
// and: | ['hedera-node/src/test/java/com/hedera/services/state/expiry/ExpiryManagerTest.java', 'hedera-node/src/main/java/com/hedera/services/context/ServicesContext.java', 'hedera-node/src/main/java/com/hedera/services/state/expiry/ExpiryManager.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 6,557,543 | 1,580,963 | 184,786 | 1,151 | 561 | 140 | 9 | 2 | 400 | 60 | 89 | 5 | 0 | 0 | 1970-01-01T00:27:01 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,457 | hashgraph/hedera-services/1413/1405 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1405 | https://github.com/hashgraph/hedera-services/pull/1413 | https://github.com/hashgraph/hedera-services/pull/1413 | 1 | close | Always use (oldExpiration + effectiveRenewalPeriod) for newExpiration in auto-renew | **Summary of the defect**
The `RenewalProcess` is using (consensusTime + effectiveRenewalPeriod) as the new expiration in the record for an auto-renewed account.
This is a mismatch with the actual new expiration that uses the (oldExpiration + effectiveRenewalPeriod).
**Suggested fix**
Just use (oldExpiration + effectiveRenewalPeriod) for both. | 618a8b16dc0b59027d65033c690dc6a0a093b973 | cc1d688d31159b2b40bab20a6bcbe5c0f100995b | https://github.com/hashgraph/hedera-services/compare/618a8b16dc0b59027d65033c690dc6a0a093b973...cc1d688d31159b2b40bab20a6bcbe5c0f100995b | diff --git a/hedera-node/src/main/java/com/hedera/services/state/expiry/renewal/RenewalProcess.java b/hedera-node/src/main/java/com/hedera/services/state/expiry/renewal/RenewalProcess.java
index 6e2beebaf6..f3b997397c 100644
--- a/hedera-node/src/main/java/com/hedera/services/state/expiry/renewal/RenewalProcess.java
+++ b/hedera-node/src/main/java/com/hedera/services/state/expiry/renewal/RenewalProcess.java
@@ -99,7 +99,7 @@ public class RenewalProcess {
final long renewalFee = usageAssessment.fee();
helper.renewLastClassifiedWith(renewalFee, effPeriod);
- recordsHelper.streamCryptoRenewal(accountId, renewalFee, longNow + effPeriod);
+ recordsHelper.streamCryptoRenewal(accountId, renewalFee, lastClassified.getExpiry() + effPeriod);
}
private void processDetachedAccountGracePeriodOver(MerkleEntityId accountId) {
diff --git a/hedera-node/src/test/java/com/hedera/services/state/expiry/renewal/RenewalProcessTest.java b/hedera-node/src/test/java/com/hedera/services/state/expiry/renewal/RenewalProcessTest.java
index 21cdd0cef8..f10b2acf27 100644
--- a/hedera-node/src/test/java/com/hedera/services/state/expiry/renewal/RenewalProcessTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/state/expiry/renewal/RenewalProcessTest.java
@@ -204,6 +204,6 @@ class RenewalProcessTest {
// then:
assertTrue(wasTouched);
verify(helper).renewLastClassifiedWith(fee, actualRenewalPeriod);
- verify(recordsHelper).streamCryptoRenewal(key, fee, now + actualRenewalPeriod);
+ verify(recordsHelper).streamCryptoRenewal(key, fee, now - 1 + actualRenewalPeriod);
}
} | ['hedera-node/src/test/java/com/hedera/services/state/expiry/renewal/RenewalProcessTest.java', 'hedera-node/src/main/java/com/hedera/services/state/expiry/renewal/RenewalProcess.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 6,557,524 | 1,580,958 | 184,786 | 1,151 | 182 | 47 | 2 | 1 | 354 | 46 | 85 | 7 | 0 | 0 | 1970-01-01T00:27:01 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,458 | hashgraph/hedera-services/1385/1365 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1365 | https://github.com/hashgraph/hedera-services/pull/1385 | https://github.com/hashgraph/hedera-services/pull/1385 | 1 | closes | AllProtectedFilesUpdate-NIReconnect-1-12m: ReconnectStreamValidator failed with IndexOutOfBoundException | **Summary of the defect**
There are two errors as listed below. They don't seem to be related, but needs more investigation to confirm.
**How to reproduce (if possible)**
slack link: https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1620199578241600
Summary link: https://hedera-hashgraph.slack.com/archives/C018Y4E6ADT/p1620211779064500
**Service logs (if applicable)**
```
---- ReconnectStreamValidator failed with an exception:
java.lang.IndexOutOfBoundsException: index should be within [0, -1]
at com.swirlds.regression.validators.StreamSigsInANode.getSigFileName(StreamSigsInANode.java:50)
at com.swirlds.regression.validators.ReconnectStreamValidator.validateNode(ReconnectStreamValidator.java:115)
at com.swirlds.regression.validators.ReconnectStreamValidator.validate(ReconnectStreamValidator.java:88)
at com.swirlds.regression.Experiment.doValidation(Experiment.java:692)
at com.swirlds.regression.Experiment.validateTest(Experiment.java:617)
at com.swirlds.regression.Experiment.runRemoteExperiment(Experiment.java:1118)
at com.swirlds.regression.RegressionMain.runExperiments(RegressionMain.java:271)
at com.swirlds.regression.RegressionMain.RunCloudExperiment(RegressionMain.java:145)
at com.swirlds.regression.RegressionMain.RunRegression(RegressionMain.java:117)
at com.swirlds.regression.RegressionMain.main(RegressionMain.java:538)
...
```
Another error from client log:
```
2021-05-05 07:16:57.116 INFO 311 HapiApiSpec - 'UpdateAllProtectedFilesDuringReconnect' finished initial execution of CustomSpecAssert
2021-05-05 07:17:09.660 INFO 208 HapiSpecOperation - Node 0.0.8 is unavailable as expected!
2021-05-05 07:17:09.660 INFO 311 HapiApiSpec - 'UpdateAllProtectedFilesDuringReconnect' finished initial execution of HapiGetAccountBalance{node=0.0.8, account=GENESIS}
2021-05-05 07:17:09.660 INFO 64 NodeLivenessTimeout - Requiring node 0.0.8 to be available in 300 SECONDS
2021-05-05 07:17:49.662 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 10 SECONDS
2021-05-05 07:18:19.664 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 20 SECONDS
2021-05-05 07:18:49.867 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 50 SECONDS
2021-05-05 07:19:19.893 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 80 SECONDS
2021-05-05 07:19:49.919 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 110 SECONDS
2021-05-05 07:20:19.948 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 140 SECONDS
2021-05-05 07:20:49.976 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 170 SECONDS
2021-05-05 07:21:20.006 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 200 SECONDS
2021-05-05 07:21:50.031 INFO 83 NodeLivenessTimeout - --> Node 0.0.8 not available after 230 SECONDS
2021-05-05 07:22:10.051 WARN 214 HapiSpecOperation - 'UpdateAllProtectedFilesDuringReconnect' - NodeLivenessTimeoutNode 0.0.8 wasn't available in 300 SECONDS! failed {}!
2021-05-05 07:22:10.122 INFO 330 HapiApiSpec - 'UpdateAllProtectedFilesDuringReconnect' - final status: FAILED!
2021-05-05 07:22:10.123 INFO 174 UpdateAllProtectedFilesDuringReconnect - -------------- RESULTS OF UpdateAllProtectedFilesDuringReconnect SUITE --------------
2021-05-05 07:22:10.123 INFO 176 UpdateAllProtectedFilesDuringReconnect - Spec{name=UpdateAllProtectedFilesDuringReconnect, status=FAILED}
2021-05-05 07:22:10.129 INFO 526 SuiteRunner - ============== sync run results ==============
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| 8306c41975eff2161ad171074d64881bdfcde253 | d5937c00017444a073a3d46c09ce0fb28aec6cc4 | https://github.com/hashgraph/hedera-services/compare/8306c41975eff2161ad171074d64881bdfcde253...d5937c00017444a073a3d46c09ce0fb28aec6cc4 | diff --git a/hedera-node/src/main/java/com/hedera/services/queries/validation/QueryFeeCheck.java b/hedera-node/src/main/java/com/hedera/services/queries/validation/QueryFeeCheck.java
index 263eecd777..9e947c0e31 100644
--- a/hedera-node/src/main/java/com/hedera/services/queries/validation/QueryFeeCheck.java
+++ b/hedera-node/src/main/java/com/hedera/services/queries/validation/QueryFeeCheck.java
@@ -70,7 +70,7 @@ public class QueryFeeCheck {
}
// number of beneficiaries in query transfer transaction can be greater than one.
// validate if node gets the required query payment
- if (transfers.stream().noneMatch(adj -> adj.getAmount() > 0 && adj.getAccountID().equals(node))) {
+ if (transfers.stream().noneMatch(adj -> adj.getAmount() >= 0 && adj.getAccountID().equals(node))) {
return INVALID_RECEIVING_NODE_ACCOUNT;
}
if (transfers.stream().anyMatch(adj -> adj.getAccountID().equals(node) && adj.getAmount() < queryFee)) {
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/HapiQueryOp.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/HapiQueryOp.java
index e379661d59..39230a6bfb 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/HapiQueryOp.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/HapiQueryOp.java
@@ -24,7 +24,9 @@ import com.hedera.services.bdd.spec.HapiPropertySource;
import com.hedera.services.bdd.spec.exceptions.HapiQueryCheckStateException;
import com.hedera.services.bdd.spec.exceptions.HapiQueryPrecheckStateException;
import com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer;
+import com.hedera.services.usage.crypto.CryptoTransferUsage;
import com.hederahashgraph.api.proto.java.CryptoTransferTransactionBody;
+import com.hederahashgraph.api.proto.java.FeeData;
import com.hederahashgraph.api.proto.java.HederaFunctionality;
import com.hederahashgraph.api.proto.java.Key;
import com.hederahashgraph.api.proto.java.Response;
@@ -39,6 +41,7 @@ import com.hedera.services.bdd.spec.fees.Payment;
import com.hedera.services.bdd.spec.keys.ControlForKey;
import com.hedera.services.bdd.spec.keys.SigMapGenerator;
import com.hedera.services.bdd.spec.stats.QueryObs;
+import com.hederahashgraph.fee.SigValueObj;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.Assert;
@@ -207,8 +210,15 @@ public abstract class HapiQueryOp<T extends HapiQueryOp<T>> extends HapiSpecOper
protected long feeFor(HapiApiSpec spec, Transaction txn, int numPayerKeys) throws Throwable {
return spec.fees().forActivityBasedOp(
HederaFunctionality.CryptoTransfer,
- cryptoFees::getCryptoTransferTxFeeMatrices,
- txn, numPayerKeys);
+ (_txn, _svo) -> usageEstimate(_txn, _svo, spec.fees().tokenTransferUsageMultiplier()),
+ txn,
+ numPayerKeys);
+ }
+
+ private FeeData usageEstimate(TransactionBody txn, SigValueObj svo, int multiplier) {
+ return CryptoTransferUsage.newEstimate(txn, suFrom(svo))
+ .givenTokenMultiplier(multiplier)
+ .get();
}
private Transaction fittedPayment(HapiApiSpec spec) throws Throwable { | ['hedera-node/src/main/java/com/hedera/services/queries/validation/QueryFeeCheck.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/queries/HapiQueryOp.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 6,554,547 | 1,580,240 | 184,708 | 1,151 | 764 | 208 | 16 | 2 | 3,916 | 356 | 1,202 | 61 | 2 | 2 | 1970-01-01T00:27:00 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,463 | hashgraph/hedera-services/1245/1236 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1236 | https://github.com/hashgraph/hedera-services/pull/1245 | https://github.com/hashgraph/hedera-services/pull/1245 | 1 | closes | MixedOps-Restart-Performance-10k-31m 20N_20C test failure | **Summary of the defect**
`MixedOps-Restart-Performance-10k-31m ` starts successfully, but fails as the schedules to be created already exist and receives a wrong actual status. Need to modify the client to continue the test irrespective of received status.
**How to reproduce (if possible)**
Slack Summary link : https://hedera-hashgraph.slack.com/archives/C018Y4E6ADT/p1617764642017000
Slack link : https://hedera-hashgraph.slack.com/archives/CKWHL8R9A/p1617764591218400
| 168e43fa5843e9dc5465413c27da28ca036e49d7 | 60a05451dbaf5da02bbb09a62a0997cd969cea8d | https://github.com/hashgraph/hedera-services/compare/168e43fa5843e9dc5465413c27da28ca036e49d7...60a05451dbaf5da02bbb09a62a0997cd969cea8d | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiSpecRegistry.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiSpecRegistry.java
index 2103fb04fa..18e5933f26 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiSpecRegistry.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiSpecRegistry.java
@@ -31,6 +31,8 @@ import com.hedera.services.bdd.spec.infrastructure.meta.SupportedContract;
import com.hedera.services.bdd.spec.stats.OpObs;
import com.hedera.services.bdd.spec.stats.ThroughputObs;
import com.hedera.services.bdd.suites.HapiApiSuite;
+import com.hedera.services.legacy.core.HexUtils;
+import com.hedera.services.legacy.core.KeyPairObj;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ConsensusCreateTopicTransactionBody;
import com.hederahashgraph.api.proto.java.ConsensusUpdateTopicTransactionBody;
@@ -47,13 +49,17 @@ import com.hederahashgraph.api.proto.java.Timestamp;
import com.hederahashgraph.api.proto.java.TokenID;
import com.hederahashgraph.api.proto.java.TopicID;
import com.hederahashgraph.api.proto.java.TransactionID;
-import com.hedera.services.legacy.core.HexUtils;
-import com.hedera.services.legacy.core.KeyPairObj;
import com.hederahashgraph.api.proto.java.TransactionRecord;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import java.io.*;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.NotSerializableException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
import java.time.Instant;
import java.util.ArrayList;
import java.util.HashMap;
@@ -66,7 +72,9 @@ import static com.hedera.services.bdd.spec.HapiPropertySource.asAccountString;
import static com.hedera.services.bdd.spec.HapiPropertySource.asScheduleString;
import static com.hedera.services.bdd.spec.HapiPropertySource.asTokenString;
import static com.hedera.services.bdd.spec.keys.KeyFactory.firstStartupKp;
-import static java.util.stream.Collectors.*;
+import static java.util.stream.Collectors.counting;
+import static java.util.stream.Collectors.groupingBy;
+import static java.util.stream.Collectors.toList;
public class HapiSpecRegistry {
static final Logger log = LogManager.getLogger(HapiSpecRegistry.class);
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java
index f5e67f5110..353d778886 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java
@@ -36,7 +36,7 @@ public interface OpProvider {
OK,
BUSY,
INSUFFICIENT_TX_FEE,
- PLATFORM_TRANSACTION_NOT_CREATED,
+ PLATFORM_TRANSACTION_NOT_CREATED
};
ResponseCodeEnum[] STANDARD_PERMISSIBLE_PRECHECKS = {
@@ -46,7 +46,7 @@ public interface OpProvider {
DUPLICATE_TRANSACTION,
INVALID_PAYER_SIGNATURE,
INSUFFICIENT_PAYER_BALANCE,
- PLATFORM_TRANSACTION_NOT_CREATED,
+ PLATFORM_TRANSACTION_NOT_CREATED
};
ResponseCodeEnum[] STANDARD_PERMISSIBLE_OUTCOMES = {
@@ -54,7 +54,7 @@ public interface OpProvider {
LIVE_HASH_NOT_FOUND,
INVALID_SIGNATURE,
INSUFFICIENT_PAYER_BALANCE,
- UNKNOWN,
+ UNKNOWN
};
default List<HapiSpecOperation> suggestedInitializers() {
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java
index cee78eddb8..1edae28c76 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java
@@ -23,9 +23,7 @@ package com.hedera.services.bdd.spec.queries.schedule;
import com.hedera.services.bdd.spec.HapiApiSpec;
import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.queries.HapiQueryOp;
-import com.hedera.services.bdd.spec.queries.QueryVerbs;
import com.hedera.services.bdd.spec.transactions.TxnUtils;
-import com.hedera.services.bdd.spec.utilops.CustomSpecAssert;
import com.hederahashgraph.api.proto.java.HederaFunctionality;
import com.hederahashgraph.api.proto.java.KeyList;
import com.hederahashgraph.api.proto.java.Query;
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleSign.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleSign.java
index 5ab531178e..e8263c95bf 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleSign.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleSign.java
@@ -22,13 +22,13 @@ package com.hedera.services.bdd.spec.transactions.schedule;
import com.google.common.base.MoreObjects;
import com.hedera.services.bdd.spec.HapiApiSpec;
-import com.hedera.services.bdd.spec.HapiPropertySource;
import com.hedera.services.bdd.spec.fees.FeeCalculator;
-import com.hedera.services.bdd.spec.queries.schedule.HapiGetScheduleInfo;
+import com.hedera.services.bdd.spec.infrastructure.RegistryNotFound;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hedera.services.bdd.suites.HapiApiSuite;
import com.hederahashgraph.api.proto.java.HederaFunctionality;
import com.hederahashgraph.api.proto.java.Key;
+import com.hederahashgraph.api.proto.java.ScheduleID;
import com.hederahashgraph.api.proto.java.ScheduleInfo;
import com.hederahashgraph.api.proto.java.ScheduleSignTransactionBody;
import com.hederahashgraph.api.proto.java.Transaction;
@@ -40,11 +40,9 @@ import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.getScheduleInfo;
import static com.hedera.services.bdd.spec.transactions.TxnUtils.asScheduleId;
import static com.hedera.services.bdd.spec.transactions.TxnUtils.suFrom;
import static com.hederahashgraph.api.proto.java.HederaFunctionality.ScheduleSign;
@@ -54,16 +52,22 @@ public class HapiScheduleSign extends HapiTxnOp<HapiScheduleSign> {
private final String schedule;
private List<String> signatories = Collections.emptyList();
+ private boolean ignoreMissing = false;
public HapiScheduleSign(String schedule) {
this.schedule = schedule;
}
- public HapiScheduleSign alsoSigningWith(String... keys) {
+ public HapiScheduleSign alsoSigningWith(String... keys) {
signatories = List.of(keys);
return this;
}
+ public HapiScheduleSign ignoreIfMissing() {
+ ignoreMissing = true;
+ return this;
+ }
+
@Override
protected HapiScheduleSign self() {
return this;
@@ -80,7 +84,15 @@ public class HapiScheduleSign extends HapiTxnOp<HapiScheduleSign> {
.txns()
.<ScheduleSignTransactionBody, ScheduleSignTransactionBody.Builder>body(
ScheduleSignTransactionBody.class, b -> {
- b.setScheduleID(asScheduleId(schedule, spec));
+ ScheduleID id;
+ try {
+ id = asScheduleId(schedule, spec);
+ b.setScheduleID(id);
+ } catch (RegistryNotFound e) {
+ if (!ignoreMissing) {
+ throw e;
+ }
+ }
}
);
return b -> b.setScheduleSign(opBody);
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/MixedOpsLoadTest.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/MixedOpsLoadTest.java
index d35ae2a779..fec0fc5f46 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/MixedOpsLoadTest.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/MixedOpsLoadTest.java
@@ -28,6 +28,7 @@ import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.time.Instant;
import java.util.List;
@@ -69,6 +70,7 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOPIC_
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.PAYER_ACCOUNT_NOT_FOUND;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.PLATFORM_TRANSACTION_NOT_CREATED;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SCHEDULE_ALREADY_EXECUTED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT;
@@ -178,7 +180,8 @@ public class MixedOpsLoadTest extends LoadTest {
INVALID_TOKEN_ID,
UNKNOWN, TOKEN_NOT_ASSOCIATED_TO_ACCOUNT)
.deferStatusResolution() :
- scheduleSign(schedule + r.nextInt(NUM_SUBMISSIONS))
+ scheduleSign(schedule + "-" + getHostName() + "-" + r.nextInt(NUM_SUBMISSIONS))
+ .ignoreIfMissing()
.noLogging()
.alsoSigningWith(receiver)
.hasPrecheckFrom(OK, INVALID_SCHEDULE_ID)
@@ -186,7 +189,8 @@ public class MixedOpsLoadTest extends LoadTest {
OK,
TRANSACTION_EXPIRED,
INVALID_SCHEDULE_ID,
- UNKNOWN)
+ UNKNOWN,
+ SCHEDULE_ALREADY_EXECUTED)
.fee(ONE_HBAR)
.deferStatusResolution()
};
@@ -236,13 +240,15 @@ public class MixedOpsLoadTest extends LoadTest {
sleepFor(10000),
inParallel(IntStream.range(0, NUM_SUBMISSIONS)
.mapToObj(ignore ->
- scheduleCreate("schedule" + scheduleId.getAndIncrement(),
+ scheduleCreate("schedule-" + getHostName() + "-" +
+ scheduleId.getAndIncrement(),
cryptoTransfer(tinyBarsFromTo(sender, receiver, 1))
)
.signedBy(DEFAULT_PAYER)
.fee(ONE_HUNDRED_HBARS)
.alsoSigningWith(sender)
.hasPrecheckFrom(STANDARD_PERMISSIBLE_PRECHECKS)
+ .hasAnyKnownStatus()
.deferStatusResolution()
.adminKey(DEFAULT_PAYER)
.noLogging())
@@ -275,4 +281,13 @@ public class MixedOpsLoadTest extends LoadTest {
protected Logger getResultsLogger() {
return log;
}
+
+ private String getHostName() {
+ try {
+ return InetAddress.getLocalHost().getHostName();
+ } catch (Exception e) {
+ log.info("Error getting host name");
+ return "Hostname-Not-Available";
+ }
+ }
} | ['test-clients/src/main/java/com/hedera/services/bdd/spec/queries/schedule/HapiGetScheduleInfo.java', 'test-clients/src/main/java/com/hedera/services/bdd/suites/perf/MixedOpsLoadTest.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/OpProvider.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/infrastructure/HapiSpecRegistry.java', 'test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/schedule/HapiScheduleSign.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 6,272,810 | 1,507,181 | 177,231 | 1,116 | 2,659 | 575 | 69 | 5 | 480 | 51 | 127 | 7 | 2 | 0 | 1970-01-01T00:26:57 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,466 | hashgraph/hedera-services/1165/1142 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1142 | https://github.com/hashgraph/hedera-services/pull/1165 | https://github.com/hashgraph/hedera-services/pull/1165 | 1 | closes | Export account balances every exportPeriodSecs since the epoch (in consensus time) | **Summary of the defect**
The account balances file should be exported each time the signed state timestamp enters a new "period", where the timeline is broken into periods of length `balances.exportPeriodSecs` beginning with the epoch.
Also:
- [ ] Remove the timestamp for client saved .pb file | 0ad046698887d5f9cfff3f534dc7aff7625388da | c826a346116f9287be42d9fb9196d3c783c15530 | https://github.com/hashgraph/hedera-services/compare/0ad046698887d5f9cfff3f534dc7aff7625388da...c826a346116f9287be42d9fb9196d3c783c15530 | diff --git a/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java b/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
index e50a3e7329..b669f61201 100644
--- a/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
+++ b/hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java
@@ -94,7 +94,8 @@ public class SignedStateBalancesExporter implements BalancesExporter {
private String lastUsedExportDir = UNKNOWN_EXPORT_DIR;
private BalancesSummary summary;
- Instant periodEnd = NEVER;
+ Instant periodBegin = NEVER;
+ private final int exportPeriod;
static final Comparator<SingleAccountBalances> SINGLE_ACCOUNT_BALANCES_COMPARATOR =
Comparator.comparing(SingleAccountBalances::getAccountID, ACCOUNT_ID_COMPARATOR);
@@ -107,21 +108,21 @@ public class SignedStateBalancesExporter implements BalancesExporter {
this.signer = signer;
this.expectedFloat = properties.getLongProperty("ledger.totalTinyBarFloat");
this.dynamicProperties = dynamicProperties;
+ exportPeriod = dynamicProperties.balancesExportPeriodSecs();
}
@Override
public boolean isTimeToExport(Instant now) {
- if (periodEnd == NEVER) {
- periodEnd = now.plusSeconds(dynamicProperties.balancesExportPeriodSecs());
- } else {
- if (now.isAfter(periodEnd)) {
- periodEnd = now.plusSeconds(dynamicProperties.balancesExportPeriodSecs());
- return true;
- }
+ if (periodBegin != NEVER
+ && now.getEpochSecond() / exportPeriod != periodBegin.getEpochSecond() / exportPeriod) {
+ periodBegin = now;
+ return true;
}
+ periodBegin = now;
return false;
}
+
@Override
public void exportBalancesFrom(ServicesState signedState, Instant when) {
if (!ensureExportDir(signedState.getNodeAccountId())) {
@@ -136,12 +137,14 @@ public class SignedStateBalancesExporter implements BalancesExporter {
when, summary.getTotalFloat(), expectedFloat)); }
log.info("Took {}ms to summarize signed state balances", watch.getTime(TimeUnit.MILLISECONDS));
- if (exportCsv) {
- toCsvFile(when);
- }
+ // .pb account balances file is our focus, process it first to let its timestamp to stay close to
+ // epoch export period boundary
if (exportProto) {
toProtoFile(when);
}
+ if (exportCsv) {
+ toCsvFile(when);
+ }
}
private void toCsvFile(Instant exportTimeStamp) {
diff --git a/hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java b/hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java
index 5f9aecfe6f..5c8d4b5793 100644
--- a/hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java
+++ b/hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java
@@ -20,7 +20,6 @@ package com.hedera.services.state.exports;
*
*/
-import com.google.protobuf.InvalidProtocolBufferException;
import com.hedera.services.ServicesState;
import com.hedera.services.config.MockGlobalDynamicProps;
import com.hedera.services.context.properties.GlobalDynamicProperties;
@@ -36,7 +35,6 @@ import com.hederahashgraph.api.proto.java.AccountID;
import com.hedera.services.stream.proto.AllAccountBalances;
import com.hedera.services.stream.proto.SingleAccountBalances;
import com.hedera.services.stream.proto.TokenUnitBalance;
-import com.hederahashgraph.api.proto.java.TokenBalances;
import com.hederahashgraph.api.proto.java.TokenID;
import com.swirlds.common.Address;
@@ -56,8 +54,6 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.ArrayList;
-import java.util.Base64;
-import java.util.Base64.Decoder;
import java.util.Comparator;
import java.util.List;
import java.util.function.UnaryOperator;
@@ -626,17 +622,29 @@ class SignedStateBalancesExporterTest {
public void initsAsExpected() {
// expect:
assertEquals(ledgerFloat, subject.expectedFloat);
- assertEquals(SignedStateBalancesExporter.NEVER, subject.periodEnd);
}
@Test
public void exportsWhenPeriodSecsHaveElapsed() {
- assertFalse(subject.isTimeToExport(now));
- assertEquals(now.plusSeconds(dynamicProperties.balancesExportPeriodSecs()), subject.periodEnd);
+ subject = new SignedStateBalancesExporter(properties, signer, dynamicProperties);
+ Instant startTime = Instant.parse("2021-03-11T10:59:59.0Z");
+ assertFalse(subject.isTimeToExport(startTime));
+ assertEquals(startTime, subject.periodBegin);
+ assertTrue(subject.isTimeToExport(startTime.plusSeconds(1)));
+ assertEquals(startTime.plusSeconds(1), subject.periodBegin);
+
+ shortlyAfter = startTime.plusSeconds(dynamicProperties.balancesExportPeriodSecs() / 2);
assertFalse(subject.isTimeToExport(shortlyAfter));
- assertEquals(now.plusSeconds(dynamicProperties.balancesExportPeriodSecs()), subject.periodEnd);
+ assertEquals(shortlyAfter, subject.periodBegin);
+
+ Instant nextPeriod = startTime.plusSeconds(dynamicProperties.balancesExportPeriodSecs() + 1);
+
+ assertTrue(subject.isTimeToExport(nextPeriod));
+ assertEquals(nextPeriod, subject.periodBegin );
+
+ anEternityLater = startTime.plusSeconds(dynamicProperties.balancesExportPeriodSecs() * 2 + 1);
assertTrue(subject.isTimeToExport(anEternityLater));
- assertEquals(anEternityLater.plusSeconds(dynamicProperties.balancesExportPeriodSecs()), subject.periodEnd);
+ assertEquals(anEternityLater, subject.periodBegin);
}
@AfterAll
diff --git a/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java b/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java
index 0c459c9380..b49cddbdd8 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java
@@ -146,13 +146,8 @@ public class HapiApiSpec implements Runnable {
public void exportAccountBalances(Supplier<String> dir) {
- Instant now = Instant.now();
- Timestamp.Builder timeStamp = Timestamp.newBuilder();
- timeStamp.setSeconds(now.getEpochSecond())
- .setNanos(now.getNano());
-
AllAccountBalances.Builder allAccountBalancesBuilder = AllAccountBalances.newBuilder()
- .addAllAllAccounts(accountBalances).setConsensusTimestamp(timeStamp);
+ .addAllAllAccounts(accountBalances);
try (FileOutputStream fout = new FileOutputStream(dir.get())) {
allAccountBalancesBuilder.build().writeTo(fout); | ['test-clients/src/main/java/com/hedera/services/bdd/spec/HapiApiSpec.java', 'hedera-node/src/test/java/com/hedera/services/state/exports/SignedStateBalancesExporterTest.java', 'hedera-node/src/main/java/com/hedera/services/state/exports/SignedStateBalancesExporter.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 6,223,610 | 1,494,110 | 175,661 | 1,105 | 1,113 | 278 | 32 | 2 | 302 | 47 | 60 | 6 | 0 | 0 | 1970-01-01T00:26:55 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
1,461 | hashgraph/hedera-services/1276/1269 | hashgraph | hedera-services | https://github.com/hashgraph/hedera-services/issues/1269 | https://github.com/hashgraph/hedera-services/pull/1276 | https://github.com/hashgraph/hedera-services/pull/1276 | 1 | closes | JRS accountbalances validation test has StackOverflowError | **Summary of the defect**
This JRS regression test has error stack overflow error message as listed below.
**How to reproduce (if possible)**
Check out this slack link: https://hedera-hashgraph.slack.com/archives/C018CBQBTGB/p1618423668352300
**Service logs (if applicable)**
```
...
java.util.concurrent.CompletionException: java.lang.StackOverflowError
at java.base/java.util.concurrent.CompletableFuture.encodeThrowable(CompletableFuture.java:314)
at java.base/java.util.concurrent.CompletableFuture.completeThrowable(CompletableFuture.java:319)
at java.base/java.util.concurrent.CompletableFuture$AsyncRun.run(CompletableFuture.java:1810)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:835)
Caused by: java.lang.StackOverflowError
at java.base/java.util.regex.Pattern$BranchConn.match(Pattern.java:4700)
at java.base/java.util.regex.Pattern$GroupTail.match(Pattern.java:4850)
at java.base/java.util.regex.Pattern$BmpCharPropertyGreedy.match(Pattern.java:4331)
at java.base/java.util.regex.Pattern$GroupHead.match(Pattern.java:4791)
at java.base/java.util.regex.Pattern$Branch.match(Pattern.java:4736)
at java.base/java.util.regex.Pattern$Branch.match(Pattern.java:4734)
```
in hapi client log:
```
2021-04-14 17:03:33.666 ERROR 68 HapiSpecOperation - Problem(s) with sub-operation(s): HapiCryptoCreate{sigs=1, node=0.0.3, account=acct-299955, balance=3116776744} :: Wrong precheck status! Expected OK, actual BUSY, HapiCryptoCreate{sigs=1, node=0.0.3, account=acct-299954} :: Wrong precheck status! Expected OK, actual BUSY, HapiCryptoCreate{sigs=1, node=0.0.3, account=acct-299952, balance=1587480112} :: Wrong precheck status! Expected OK, actual BUSY
com.hedera.services.bdd.spec.exceptions.HapiTxnPrecheckStateException: Wrong precheck status! Expected OK, actual BUSY
com.hedera.services.bdd.spec.exceptions.HapiTxnPrecheckStateException: Wrong precheck status! Expected OK, actual BUSY
com.hedera.services.bdd.spec.exceptions.HapiTxnPrecheckStateException: Wrong precheck status! Expected OK, actual BUSY
com.hedera.services.bdd.spec.exceptions.HapiTxnPrecheckStateException: Wrong precheck status! Expected OK, actual BUSY
```
**Environment:**
- OS: [e.g. Ubuntu 18.04]
- Java: [e.g. OpenJDK 11.0.4]
- Hedera Services Version: [e.g. 0.0.5]
- HAPI Version: [e.g. 0.0.5]
**Additional Context**
Add any other context about the problem here. Attach any logs here, if applicable.
| a7673ca33c161f2ef8d6b192318c33b69be4de3c | 812f380b596d4e35d74545fb465b4992c13dd0fb | https://github.com/hashgraph/hedera-services/compare/a7673ca33c161f2ef8d6b192318c33b69be4de3c...812f380b596d4e35d74545fb465b4992c13dd0fb | diff --git a/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/AccountBalancesClientSaveLoadTest.java b/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/AccountBalancesClientSaveLoadTest.java
index 07d240541f..bb9ad57a21 100644
--- a/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/AccountBalancesClientSaveLoadTest.java
+++ b/test-clients/src/main/java/com/hedera/services/bdd/suites/perf/AccountBalancesClientSaveLoadTest.java
@@ -21,9 +21,12 @@ package com.hedera.services.bdd.suites.perf;
*/
import com.hedera.services.bdd.spec.HapiApiSpec;
+import com.hedera.services.bdd.spec.HapiPropertySource;
import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.infrastructure.OpProvider;
+import com.hedera.services.bdd.spec.infrastructure.RegistryNotFound;
import com.hedera.services.bdd.spec.utilops.LoadTest;
+import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
@@ -61,6 +64,7 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_REPEAT
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.BUSY;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.DUPLICATE_TRANSACTION;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TOKEN_BALANCE;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TX_FEE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.PLATFORM_TRANSACTION_NOT_CREATED;
@@ -135,7 +139,7 @@ public class AccountBalancesClientSaveLoadTest extends LoadTest {
.maxOpsPerSec(() -> settings.getTps())
.maxPendingOps(() -> MAX_PENDING_OPS_FOR_SETUP)),
- sleepFor(20 * SECOND),
+ sleepFor(10 * SECOND),
sourcing(() -> runWithProvider(tokensCreate(settings))
.lasting(() -> totalTestTokens / ESTIMATED_TOKEN_CREATION_RATE + 10,
@@ -160,13 +164,21 @@ public class AccountBalancesClientSaveLoadTest extends LoadTest {
).then(
sleepFor(10 * SECOND),
withOpContext( (spec, log) -> {
- log.info("Now get all {} accounts created and save it in spec", totalAccounts);
- for(int i = totalAccounts - 1; i >=0; i-- ) {
- var op = getAccountBalance(ACCT_NAME_PREFIX + i)
+ log.info("Now get all {} accounts created and save them", totalAccounts);
+ AccountID acctID = AccountID.getDefaultInstance();
+ for(int i = 0; i < totalAccounts; i++ ) {
+ String acctName = ACCT_NAME_PREFIX + i;
+ // Make sure the named account was created before query its balances.
+ try {
+ acctID = spec.registry().getAccountID(acctName);
+ } catch (RegistryNotFound e) {
+ log.info(acctName + " was not created successfully.");
+ continue;
+ }
+ var op = getAccountBalance(HapiPropertySource.asAccountString(acctID))
.hasAnswerOnlyPrecheckFrom(permissiblePrechecks)
.persists(true)
.noLogging();
-
allRunFor(spec, op);
}
}),
@@ -188,7 +200,7 @@ public class AccountBalancesClientSaveLoadTest extends LoadTest {
log.info("Total accounts: {}", totalAccounts);
log.info("Total tokens: {}", totalTestTokens);
- AtomicInteger createdSofar = new AtomicInteger(0);
+ AtomicInteger moreToCreate = new AtomicInteger(totalAccounts );
return spec -> new OpProvider() {
@Override
@@ -200,18 +212,23 @@ public class AccountBalancesClientSaveLoadTest extends LoadTest {
@Override
public Optional<HapiSpecOperation> get() {
int next;
- if ((next = createdSofar.getAndIncrement()) >= totalAccounts) {
+ next = moreToCreate.getAndDecrement();
+ if (next <= 0) {
return Optional.empty();
}
- var op = cryptoCreate(String.format("%s%s",ACCT_NAME_PREFIX , next))
- .balance((long)(r.nextInt((int)ONE_HBAR) * 1000 + MIN_ACCOUNT_BALANCE))
+ var op = cryptoCreate(String.format("%s%d",ACCT_NAME_PREFIX , next))
+ .balance((long)(r.nextInt((int)ONE_HBAR) + MIN_ACCOUNT_BALANCE))
.key(GENESIS)
.fee(ONE_HUNDRED_HBARS)
.withRecharging()
.rechargeWindow(30)
+ .hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
+ .hasPrecheckFrom(DUPLICATE_TRANSACTION, OK, INSUFFICIENT_TX_FEE)
+ .hasKnownStatusFrom(SUCCESS,INVALID_SIGNATURE)
.noLogging()
.deferStatusResolution();
+
return Optional.of(op);
}
};
@@ -230,7 +247,8 @@ public class AccountBalancesClientSaveLoadTest extends LoadTest {
@Override
public Optional<HapiSpecOperation> get() {
int next;
- if ((next = createdSofar.getAndIncrement()) >= totalTestTokens) {
+ next = createdSofar.getAndIncrement();
+ if (next >= totalTestTokens) {
return Optional.empty();
}
var payingTreasury = String.format(ACCT_NAME_PREFIX + next); | ['test-clients/src/main/java/com/hedera/services/bdd/suites/perf/AccountBalancesClientSaveLoadTest.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,274,938 | 1,507,652 | 177,299 | 1,116 | 2,027 | 509 | 38 | 1 | 2,681 | 184 | 698 | 49 | 1 | 2 | 1970-01-01T00:26:58 | 220 | Java | {'Java': 47108298, 'PureBasic': 6600158, 'HTML': 648631, 'Solidity': 616825, 'Kotlin': 207028, 'Shell': 76275, 'Python': 34764, 'Go': 24380, 'Dockerfile': 21409, 'Batchfile': 7426, 'Perl': 7364} | Apache License 2.0 |
547 | cqframework/clinical_quality_language/653/652 | cqframework | clinical_quality_language | https://github.com/cqframework/clinical_quality_language/issues/652 | https://github.com/cqframework/clinical_quality_language/pull/653 | https://github.com/cqframework/clinical_quality_language/pull/653 | 1 | fix | cql-formatter Main produces unusable output | I was playing around with the tools/cql-formatter project and noticed that when you run the main method, it produces unusable output.
```
$ ./cql-formatter dummy.cql
org.cqframework.cql.tools.formatter.CqlFormatterVisitor$FormatResult@2dff7085
```
The code either needs to implement toString on FormatResult or interrogate the FormatResult in main and handle as needed. | 2f9691877459c85709791864a62a8f17fc533a3b | bf524c38f79294ac7ce55dbac099352bd46b2646 | https://github.com/cqframework/clinical_quality_language/compare/2f9691877459c85709791864a62a8f17fc533a3b...bf524c38f79294ac7ce55dbac099352bd46b2646 | diff --git a/Src/java/tools/cql-formatter/src/main/java/org/cqframework/cql/tools/formatter/Main.java b/Src/java/tools/cql-formatter/src/main/java/org/cqframework/cql/tools/formatter/Main.java
index 4e523ca5..33a8b491 100644
--- a/Src/java/tools/cql-formatter/src/main/java/org/cqframework/cql/tools/formatter/Main.java
+++ b/Src/java/tools/cql-formatter/src/main/java/org/cqframework/cql/tools/formatter/Main.java
@@ -19,6 +19,19 @@ public class Main {
is = new FileInputStream(inputFile);
}
- System.out.print(CqlFormatterVisitor.getFormattedOutput(is));
+ try {
+ CqlFormatterVisitor.FormatResult result = CqlFormatterVisitor.getFormattedOutput(is);
+ if( result.getErrors() != null && result.getErrors().size() > 0 ) {
+ for( Exception ex : result.getErrors() ) {
+ System.out.println( ex.getMessage() );
+ }
+ } else {
+ System.out.print( result.getOutput() );
+ }
+ } finally {
+ if( is != System.in ) {
+ try { is.close(); } catch( IOException iex ) { }
+ }
+ }
}
} | ['Src/java/tools/cql-formatter/src/main/java/org/cqframework/cql/tools/formatter/Main.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,359,145 | 268,621 | 34,533 | 171 | 648 | 126 | 15 | 1 | 378 | 46 | 87 | 8 | 0 | 1 | 1970-01-01T00:27:06 | 217 | Java | {'Java': 3999293, 'C#': 276543, 'ANTLR': 42251, 'TSQL': 1292, 'Batchfile': 927} | Apache License 2.0 |
8,770 | aya-prover/aya-dev/674/672 | aya-prover | aya-dev | https://github.com/aya-prover/aya-dev/issues/672 | https://github.com/aya-prover/aya-dev/pull/674 | https://github.com/aya-prover/aya-dev/pull/674 | 1 | fix | Newly created upstream aya file in LSP was not added to the correct library | Buggy:
```log
[Log]: Created new file: \\success\\common\\src\\Homotopy\\icekiva.aya, mocked a library Homotopy for it
[Log]: Created new file: \\success\\common\\src\\ice4k.aya, mocked a library src for it
[Log]: Created new file: \\success\\src\\ProtoIssues\\ice5k.aya, added to owner: tests
```
Expected:
```log
[Log]: Created new file: \\success\\common\\src\\Homotopy\\icekiva.aya, added to owner: common
[Log]: Created new file: \\success\\common\\src\\ice4k.aya, added to owner: common
[Log]: Created new file: \\success\\src\\ProtoIssues\\ice5k.aya, added to owner: tests
``` | a88f24fab955edb262bdbed618723f0f88973c2d | 7a742c7f80894f4c065283bf9698cf404df005c5 | https://github.com/aya-prover/aya-dev/compare/a88f24fab955edb262bdbed618723f0f88973c2d...7a742c7f80894f4c065283bf9698cf404df005c5 | diff --git a/lsp/src/main/java/org/aya/lsp/server/AyaLanguageServer.java b/lsp/src/main/java/org/aya/lsp/server/AyaLanguageServer.java
index 90fe6b23b..6edd5a658 100644
--- a/lsp/src/main/java/org/aya/lsp/server/AyaLanguageServer.java
+++ b/lsp/src/main/java/org/aya/lsp/server/AyaLanguageServer.java
@@ -7,6 +7,7 @@ import kala.collection.immutable.ImmutableMap;
import kala.collection.immutable.ImmutableSeq;
import kala.collection.mutable.MutableList;
import kala.collection.mutable.MutableMap;
+import kala.collection.mutable.MutableSet;
import kala.control.Option;
import kala.tuple.Tuple;
import org.aya.cli.library.LibraryCompiler;
@@ -142,7 +143,23 @@ public class AyaLanguageServer implements LanguageServer {
if (path == null) return null;
var ayaJson = path.resolve(Constants.AYA_JSON);
if (!Files.exists(ayaJson)) return findOwner(path.getParent());
- return libraries.find(lib -> lib.underlyingLibrary().libraryRoot().equals(path)).getOrNull();
+ var book = MutableSet.<LibraryConfig>create();
+ for (var lib : libraries) {
+ var found = findOwner(book, lib, path);
+ if (found != null) return found;
+ }
+ return null;
+ }
+
+ private @Nullable LibraryOwner findOwner(@NotNull MutableSet<LibraryConfig> book, @NotNull LibraryOwner owner, @NotNull Path libraryRoot) {
+ if (book.contains(owner.underlyingLibrary())) return null;
+ book.add(owner.underlyingLibrary());
+ if (owner.underlyingLibrary().libraryRoot().equals(libraryRoot)) return owner;
+ for (var dep : owner.libraryDeps()) {
+ var found = findOwner(book, dep, libraryRoot);
+ if (found != null) return found;
+ }
+ return null;
}
private @Nullable LibrarySource find(@NotNull LibraryOwner owner, Path moduleFile) { | ['lsp/src/main/java/org/aya/lsp/server/AyaLanguageServer.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,185,179 | 293,135 | 32,609 | 351 | 841 | 194 | 19 | 1 | 570 | 64 | 180 | 13 | 0 | 2 | 1970-01-01T00:27:48 | 211 | Java | {'Java': 1572742, 'Kotlin': 23129, 'Groovy': 9402, 'Shell': 8693, 'JavaScript': 8595, 'Lex': 7597, 'ANTLR': 6489, 'CSS': 3870, 'HTML': 859, 'Nix': 689, 'Batchfile': 487} | MIT License |
8,774 | aya-prover/aya-dev/476/472 | aya-prover | aya-dev | https://github.com/aya-prover/aya-dev/issues/472 | https://github.com/aya-prover/aya-dev/pull/476 | https://github.com/aya-prover/aya-dev/pull/476 | 1 | fix | Pretty printer bug | This assertion
```patch
Index: base/src/test/java/org/aya/core/DistillerTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
diff --git a/base/src/test/java/org/aya/core/DistillerTest.java b/base/src/test/java/org/aya/core/DistillerTest.java
--- a/base/src/test/java/org/aya/core/DistillerTest.java (revision 3275eeb0b50bc6cec23bbdacf86aa6c734cc02cb)
+++ b/base/src/test/java/org/aya/core/DistillerTest.java (date 1662024364066)
@@ -88,6 +88,18 @@
assertEquals("(Pi (A : Type 0) -> A) = X", test2.toDoc(DistillerOptions.informative()).debugRender());
}
+ @Test public void binop() {
+ var decls = TyckDeclTest.successTyckDecls("""
+ open data Nat | zero | suc Nat
+ open data D | infix · Nat Nat
+
+ def g (h : Nat -> D) : Nat => zero
+ def t (n : Nat) => g (n ·)
+ """)._2;
+ var t = ((FnDef) decls.get(3)).body.getLeftValue();
+ assertEquals("g (n ·)", t.toDoc(DistillerOptions.informative()).debugRender());
+ }
+
private @NotNull Doc declDoc(@Language("TEXT") String text) {
return Doc.vcat(TyckDeclTest.successTyckDecls(text)._2.map(d -> d.toDoc(DistillerOptions.debug())));
}
```
would fail:
```
org.opentest4j.AssertionFailedError:
Expected :g (n ·)
Actual :g · n
```
| 740b8c467d91c5e3c74ef2b539de5ed37908ddac | 7963a1b084eaa52ab92ccbfc2268454406bb1e3c | https://github.com/aya-prover/aya-dev/compare/740b8c467d91c5e3c74ef2b539de5ed37908ddac...7963a1b084eaa52ab92ccbfc2268454406bb1e3c | diff --git a/base/src/main/java/org/aya/distill/CoreDistiller.java b/base/src/main/java/org/aya/distill/CoreDistiller.java
index 22078a01b..5544d83c9 100644
--- a/base/src/main/java/org/aya/distill/CoreDistiller.java
+++ b/base/src/main/java/org/aya/distill/CoreDistiller.java
@@ -73,7 +73,7 @@ public class CoreDistiller extends BaseDistiller<Term> {
var style = chooseStyle(defVar);
bodyDoc = style != null
? visitArgsCalls(defVar, style, args, Outer.Free)
- : visitCalls(false, varDoc(defVar), args, Outer.Free,
+ : visitCalls(defVar.isInfix(), varDoc(defVar), args, params.isEmpty() ? outer : Outer.Free,
options.map.get(DistillerOptions.Key.ShowImplicitArgs));
}
} else bodyDoc = term(Outer.Free, body);
diff --git a/base/src/test/java/org/aya/core/DistillerTest.java b/base/src/test/java/org/aya/core/DistillerTest.java
index 8c49a85db..4fe5721b4 100644
--- a/base/src/test/java/org/aya/core/DistillerTest.java
+++ b/base/src/test/java/org/aya/core/DistillerTest.java
@@ -88,6 +88,18 @@ public class DistillerTest {
assertEquals("(Pi (A : Type 0) -> A) = X", test2.toDoc(DistillerOptions.informative()).debugRender());
}
+ @Test public void binop() {
+ var decls = TyckDeclTest.successTyckDecls("""
+ open data Nat | zero | suc Nat
+ open data D | infix · Nat Nat
+
+ def g (h : Nat -> D) : Nat => zero
+ def t (n : Nat) => g (n ·)
+ """)._2;
+ var t = ((FnDef) decls.get(3)).body.getLeftValue();
+ assertEquals("g (n ·)", t.toDoc(DistillerOptions.informative()).debugRender());
+ }
+
private @NotNull Doc declDoc(@Language("TEXT") String text) {
return Doc.vcat(TyckDeclTest.successTyckDecls(text)._2.map(d -> d.toDoc(DistillerOptions.debug())));
} | ['base/src/main/java/org/aya/distill/CoreDistiller.java', 'base/src/test/java/org/aya/core/DistillerTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 951,835 | 229,182 | 26,016 | 321 | 175 | 42 | 2 | 1 | 1,399 | 133 | 414 | 40 | 0 | 2 | 1970-01-01T00:27:42 | 211 | Java | {'Java': 1572742, 'Kotlin': 23129, 'Groovy': 9402, 'Shell': 8693, 'JavaScript': 8595, 'Lex': 7597, 'ANTLR': 6489, 'CSS': 3870, 'HTML': 859, 'Nix': 689, 'Batchfile': 487} | MIT License |
8,810 | advanced-rocketry/advancedrocketry/2306/2279 | advanced-rocketry | advancedrocketry | https://github.com/Advanced-Rocketry/AdvancedRocketry/issues/2279 | https://github.com/Advanced-Rocketry/AdvancedRocketry/pull/2306 | https://github.com/Advanced-Rocketry/AdvancedRocketry/pull/2306 | 1 | fixes | [BUG] Multiplayer observatory NullPointerException on latest version |
## Version of Advanced Rocketry
> 2.0.0-257
> The latest code from the 1.12.2 branch on github
## Have you verified this is an issue in the latest unstable build
- [x] Y
- [] N
- [] N/A
## Version of LibVulpes
> 0.4.2-87
> The latest code from the 1.12.2 branch
## Version of Minecraft
>1.12.2
## Does this occur without other mods installed
- [x] Y
- [] N
- [] N/A
## Crash report or log or visualVM (if applicable)
http://pastebin.com is a good place to put them
crash reports that are put in the issue itself are
hard to read
>(257 crash report) https://pastebin.com/QrJrXQz4
>(latest source code) https://pastebin.com/H0DzMWtg
## Description of the problem
>Note: The problem occurs exclusively in multiplayer. And the error only occurs in the client.
>Description: When trying to view asteroid contents in the observatory, a NullPointerException occurs in the client and no asteroid content information is shown in the GUI.
>Step 1: Create 1.12.2 multiplayer server
>Step 2: Build observatory multiblock
>Step 3: Insert data storage module with distance data
>Step 4: Scan for asteroids
>Step 5: Click on any asteroid. Nothing will appear in the right hand side of the GUI, and there will be a NullPointerException in the client log.
| 614442ed004093dbe20719d33ce3b71e278d7899 | 6f547c0f01755c0ae6eb967faf818871278ea0b6 | https://github.com/advanced-rocketry/advancedrocketry/compare/614442ed004093dbe20719d33ce3b71e278d7899...6f547c0f01755c0ae6eb967faf818871278ea0b6 | diff --git a/src/main/java/zmaster587/advancedRocketry/api/ARConfiguration.java b/src/main/java/zmaster587/advancedRocketry/api/ARConfiguration.java
index 6a0e6361..c3031bfe 100644
--- a/src/main/java/zmaster587/advancedRocketry/api/ARConfiguration.java
+++ b/src/main/java/zmaster587/advancedRocketry/api/ARConfiguration.java
@@ -167,6 +167,7 @@ public class ARConfiguration {
out.writeFloat(asteroid.richnessVariability); //variability of richness
out.writeFloat(asteroid.probability); //probability of the asteroid spawning
out.writeFloat(asteroid.timeMultiplier);
+ out.writeItemStack(asteroid.baseStack);
out.writeInt(asteroid.stackProbabilities.size());
for(int i = 0; i < asteroid.stackProbabilities.size(); i++)
@@ -245,6 +246,12 @@ public class ARConfiguration {
asteroid.richnessVariability = in.readFloat(); //variability of richness
asteroid.probability = in.readFloat(); //probability of the asteroid spawning
asteroid.timeMultiplier = in.readFloat();
+ try {
+ asteroid.baseStack = in.readItemStack();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
int size = in.readInt();
for(int i = 0; i < size; i++) | ['src/main/java/zmaster587/advancedRocketry/api/ARConfiguration.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 2,702,130 | 691,095 | 75,690 | 510 | 166 | 43 | 7 | 1 | 1,313 | 211 | 347 | 52 | 3 | 0 | 1970-01-01T00:27:29 | 206 | Java | {'Java': 2702080, 'Kotlin': 9412, 'Shell': 6150} | MIT License |
793 | innovativeonlineindustries/industrial-foregoing/848/822 | innovativeonlineindustries | industrial-foregoing | https://github.com/InnovativeOnlineIndustries/Industrial-Foregoing/issues/822 | https://github.com/InnovativeOnlineIndustries/Industrial-Foregoing/pull/848 | https://github.com/InnovativeOnlineIndustries/Industrial-Foregoing/pull/848 | 1 | closes | [1.16] Issue Title: Fluid Extractor continues to drain power when internal buffer is full. | **Industrial Foregoing Version:** industrial-foregoing-1.16.1-3.0.4-eeeb31e
**Titanium Version:**
**Crashlog If Applicable (Upload to pastebin/gist):**
Expected Operation: When internal buffer is full of fluid, machine pauses operation, and ceases to drain power.
Actual Operation: When internal buffer is full of fluid, machine continues to operate, and voids additional fluids.
| a2e2ddff6c3187100984084b449aa83ff4c16167 | ee0bde61845bde1d90818f6c0d3d3dfa37658f6f | https://github.com/innovativeonlineindustries/industrial-foregoing/compare/a2e2ddff6c3187100984084b449aa83ff4c16167...ee0bde61845bde1d90818f6c0d3d3dfa37658f6f | diff --git a/src/main/java/com/buuz135/industrial/block/core/tile/FluidExtractorTile.java b/src/main/java/com/buuz135/industrial/block/core/tile/FluidExtractorTile.java
index dc1847ba..e93edb26 100644
--- a/src/main/java/com/buuz135/industrial/block/core/tile/FluidExtractorTile.java
+++ b/src/main/java/com/buuz135/industrial/block/core/tile/FluidExtractorTile.java
@@ -48,7 +48,7 @@ public class FluidExtractorTile extends IndustrialAreaWorkingTile<FluidExtractor
if (isLoaded(pos)) {
if (currentRecipe == null || !currentRecipe.matches(this.world, pos))
currentRecipe = findRecipe(this.world, pos);
- if (currentRecipe != null) {//GetDimensionType
+ if (currentRecipe != null && this.tank.getFluidAmount() < this.tank.getCapacity()) {//GetDimensionType
FluidExtractionProgress extractionProgress = EXTRACTION.computeIfAbsent(this.world.func_230315_m_(), dimensionType -> new HashMap<>()).computeIfAbsent(this.world.getChunkAt(pos).getPos(), chunkPos -> new HashMap<>()).computeIfAbsent(pos, pos1 -> new FluidExtractionProgress(this.world));
tank.fillForced(currentRecipe.output.copy(), IFluidHandler.FluidAction.EXECUTE);
if (this.world.rand.nextDouble() <= currentRecipe.breakChance) { | ['src/main/java/com/buuz135/industrial/block/core/tile/FluidExtractorTile.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 965,028 | 212,624 | 22,118 | 275 | 175 | 40 | 2 | 1 | 393 | 46 | 95 | 10 | 0 | 0 | 1970-01-01T00:26:41 | 205 | Java | {'Java': 1983332, 'Mustache': 203} | MIT License |
794 | innovativeonlineindustries/industrial-foregoing/844/830 | innovativeonlineindustries | industrial-foregoing | https://github.com/InnovativeOnlineIndustries/Industrial-Foregoing/issues/830 | https://github.com/InnovativeOnlineIndustries/Industrial-Foregoing/pull/844 | https://github.com/InnovativeOnlineIndustries/Industrial-Foregoing/pull/844 | 1 | closes | [1.16] Biofuel generators always using biofuel | **Industrial Foregoing Version:** 1.16.1-3.0.5
**Titanium Version:** 1.16.1-3.0.4
**Crashlog If Applicable (Upload to pastebin/gist):** N/A
Biofuel generators always burnign fuel even when full of energy.
https://youtu.be/c6kKxCg1ofw
| 34c35d67684e79076b3577e9f2e2a85d02842661 | 955a1e86688e4597032aa6f62f6cba55bfe25966 | https://github.com/innovativeonlineindustries/industrial-foregoing/compare/34c35d67684e79076b3577e9f2e2a85d02842661...955a1e86688e4597032aa6f62f6cba55bfe25966 | diff --git a/src/main/java/com/buuz135/industrial/block/generator/tile/BiofuelGeneratorTile.java b/src/main/java/com/buuz135/industrial/block/generator/tile/BiofuelGeneratorTile.java
index c5c275a4..89a23399 100644
--- a/src/main/java/com/buuz135/industrial/block/generator/tile/BiofuelGeneratorTile.java
+++ b/src/main/java/com/buuz135/industrial/block/generator/tile/BiofuelGeneratorTile.java
@@ -44,7 +44,7 @@ public class BiofuelGeneratorTile extends IndustrialGeneratorTile<BiofuelGenerat
@Override
public boolean canStart() {
- return biofuel.getFluidAmount() > 0;
+ return biofuel.getFluidAmount() > 0 && this.getEnergyStorage().getEnergyStored() < this.getEnergyStorage().getMaxEnergyStored();
}
@Override | ['src/main/java/com/buuz135/industrial/block/generator/tile/BiofuelGeneratorTile.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 964,980 | 212,608 | 22,118 | 275 | 183 | 44 | 2 | 1 | 249 | 25 | 83 | 11 | 1 | 0 | 1970-01-01T00:26:41 | 205 | Java | {'Java': 1983332, 'Mustache': 203} | MIT License |
418 | haxefoundation/intellij-haxe/879/875 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/875 | https://github.com/HaxeFoundation/intellij-haxe/pull/879 | https://github.com/HaxeFoundation/intellij-haxe/pull/879 | 1 | closes | ClassCastException: TwigFile cannot be cast to HaxeFile | I think that we need to check the file type before processing inspections. TwigFiles are PHP templates and this probably occurred in PhpStorm, rather than IDEA.
JetBrains report:
```
java.lang.ClassCastException: com.jetbrains.twig.TwigFile cannot be cast to com.intellij.plugins.haxe.lang.psi.HaxeFile
at com.intellij.plugins.haxe.util.HaxeImportUtil.findUnusedImports(HaxeImportUtil.java:37)
at com.intellij.plugins.haxe.ide.inspections.HaxeUnusedImportInspection.checkFile(HaxeUnusedImportInspection.java:71)
at com.intellij.codeInspection.LocalInspectionTool$1.visitFile(LocalInspectionTool.java:141)
at com.intellij.extapi.psi.PsiFileBase.accept(PsiFileBase.java:70)
at com.intellij.codeInspection.InspectionEngine.acceptElements(InspectionEngine.java:75)
at com.intellij.codeInsight.daemon.impl.LocalInspectionsPass.lambda$visitRestElementsAndCleanup$4(LocalInspectionsPass.java:299)
at com.intellij.concurrency.ApplierCompleter.execAndForkSubTasks(ApplierCompleter.java:133)
at com.intellij.concurrency.ApplierCompleter.tryToExecAllList(ApplierCompleter.java:223)
at com.intellij.concurrency.ApplierCompleter.execAndForkSubTasks(ApplierCompleter.java:151)
at com.intellij.concurrency.ApplierCompleter.tryToExecAllList(ApplierCompleter.java:223)
at com.intellij.concurrency.ApplierCompleter.execAndForkSubTasks(ApplierCompleter.java:151)
at com.intellij.openapi.application.impl.ApplicationImpl.tryRunReadAction(ApplicationImpl.java:1175)
at com.intellij.concurrency.ApplierCompleter.lambda$wrapInReadActionAndIndicator$1(ApplierCompleter.java:105)
at com.intellij.openapi.progress.impl.CoreProgressManager.registerIndicatorAndRun(CoreProgressManager.java:583)
at com.intellij.openapi.progress.impl.CoreProgressManager.executeProcessUnderProgress(CoreProgressManager.java:533)
at com.intellij.openapi.progress.impl.ProgressManagerImpl.executeProcessUnderProgress(ProgressManagerImpl.java:87)
at com.intellij.concurrency.ApplierCompleter.wrapInReadActionAndIndicator(ApplierCompleter.java:116)
at com.intellij.concurrency.ApplierCompleter.lambda$compute$0(ApplierCompleter.java:96)
at com.intellij.openapi.application.impl.ReadMostlyRWLock.executeByImpatientReader(ReadMostlyRWLock.java:147)
at com.intellij.openapi.application.impl.ApplicationImpl.executeByImpatientReader(ApplicationImpl.java:223)
at com.intellij.concurrency.ApplierCompleter.compute(ApplierCompleter.java:96)
at java.util.concurrent.CountedCompleter.exec(CountedCompleter.java:731)
at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
at java.util.concurrent.ForkJoinPool$WorkQueue.pollAndExecCC(ForkJoinPool.java:1190)
at java.util.concurrent.ForkJoinPool.helpComplete(ForkJoinPool.java:1879)
at java.util.concurrent.ForkJoinPool.awaitJoin(ForkJoinPool.java:2045)
at java.util.concurrent.ForkJoinTask.doJoin(ForkJoinTask.java:390)
at java.util.concurrent.ForkJoinTask.join(ForkJoinTask.java:719)
at java.util.concurrent.ForkJoinPool.invoke(ForkJoinPool.java:2616)
at com.intellij.concurrency.JobLauncherImpl.invokeConcurrentlyUnderProgress(JobLauncherImpl.java:65)
at com.intellij.concurrency.JobLauncher.invokeConcurrentlyUnderProgress(JobLauncher.java:56)
at com.intellij.codeInsight.daemon.impl.LocalInspectionsPass.visitRestElementsAndCleanup(LocalInspectionsPass.java:310)
at com.intellij.codeInsight.daemon.impl.LocalInspectionsPass.inspect(LocalInspectionsPass.java:221)
at com.intellij.codeInsight.daemon.impl.LocalInspectionsPass.collectInformationWithProgress(LocalInspectionsPass.java:119)
at com.intellij.codeInsight.daemon.impl.ProgressableTextEditorHighlightingPass.doCollectInformation(ProgressableTextEditorHighlightingPass.java:84)
at com.intellij.codeHighlighting.TextEditorHighlightingPass.collectInformation(TextEditorHighlightingPass.java:69)
at com.intellij.codeInsight.daemon.impl.PassExecutorService$ScheduledPass.lambda$null$1(PassExecutorService.java:423)
at com.intellij.openapi.application.impl.ApplicationImpl.tryRunReadAction(ApplicationImpl.java:1180)
at com.intellij.codeInsight.daemon.impl.PassExecutorService$ScheduledPass.lambda$doRun$2(PassExecutorService.java:416)
at com.intellij.openapi.progress.impl.CoreProgressManager.registerIndicatorAndRun(CoreProgressManager.java:583)
at com.intellij.openapi.progress.impl.CoreProgressManager.executeProcessUnderProgress(CoreProgressManager.java:533)
at com.intellij.openapi.progress.impl.ProgressManagerImpl.executeProcessUnderProgress(ProgressManagerImpl.java:87)
at com.intellij.codeInsight.daemon.impl.PassExecutorService$ScheduledPass.doRun(PassExecutorService.java:415)
at com.intellij.codeInsight.daemon.impl.PassExecutorService$ScheduledPass.lambda$run$0(PassExecutorService.java:391)
at com.intellij.openapi.application.impl.ReadMostlyRWLock.executeByImpatientReader(ReadMostlyRWLock.java:147)
at com.intellij.openapi.application.impl.ApplicationImpl.executeByImpatientReader(ApplicationImpl.java:223)
at com.intellij.codeInsight.daemon.impl.PassExecutorService$ScheduledPass.run(PassExecutorService.java:389)
at com.intellij.concurrency.JobLauncherImpl$VoidForkJoinTask$1.exec(JobLauncherImpl.java:161)
at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
``` | 116ac2a00201dd8abde3384a941cf8f163c7c8f4 | b42d9df3db2274a515c3d748d34d57ae35fe7599 | https://github.com/haxefoundation/intellij-haxe/compare/116ac2a00201dd8abde3384a941cf8f163c7c8f4...b42d9df3db2274a515c3d748d34d57ae35fe7599 | diff --git a/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeDeprecatedInspection.java b/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeDeprecatedInspection.java
index a416c389..41a702de 100644
--- a/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeDeprecatedInspection.java
+++ b/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeDeprecatedInspection.java
@@ -25,6 +25,7 @@ import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.openapi.util.TextRange;
import com.intellij.plugins.haxe.HaxeBundle;
import com.intellij.plugins.haxe.ide.annotator.HaxeAnnotatingVisitor;
+import com.intellij.plugins.haxe.lang.psi.HaxeFile;
import com.intellij.plugins.haxe.lang.psi.HaxeMethodDeclaration;
import com.intellij.plugins.haxe.lang.psi.HaxeReferenceExpression;
import com.intellij.plugins.haxe.lang.psi.HaxeFieldDeclaration;
@@ -68,6 +69,7 @@ public class HaxeDeprecatedInspection extends LocalInspectionTool {
@Nullable
@Override
public ProblemDescriptor[] checkFile(@NotNull PsiFile file, @NotNull final InspectionManager manager, final boolean isOnTheFly) {
+ if (!(file instanceof HaxeFile)) return null;
final List<ProblemDescriptor> result = new ArrayList<ProblemDescriptor>();
new HaxeAnnotatingVisitor() {
@Override
diff --git a/src/common/com/intellij/plugins/haxe/ide/inspections/HaxePreprocessorInspection.java b/src/common/com/intellij/plugins/haxe/ide/inspections/HaxePreprocessorInspection.java
index 7276c49a..8dcd218b 100644
--- a/src/common/com/intellij/plugins/haxe/ide/inspections/HaxePreprocessorInspection.java
+++ b/src/common/com/intellij/plugins/haxe/ide/inspections/HaxePreprocessorInspection.java
@@ -22,6 +22,7 @@ import com.intellij.lang.ASTNode;
import com.intellij.lang.FileASTNode;
import com.intellij.plugins.haxe.HaxeBundle;
import com.intellij.plugins.haxe.lang.lexer.HaxeTokenTypes;
+import com.intellij.plugins.haxe.lang.psi.HaxeFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.impl.source.tree.TreeUtil;
@@ -65,7 +66,7 @@ public class HaxePreprocessorInspection extends LocalInspectionTool {
@Nullable
@Override
public ProblemDescriptor[] checkFile(@NotNull PsiFile file, @NotNull final InspectionManager manager, final boolean isOnTheFly) {
-
+ if (!(file instanceof HaxeFile)) return null;
final List<ProblemDescriptor> result = new ArrayList<ProblemDescriptor>();
final ProblemReporter reporter = new ProblemReporter() {
@Override
diff --git a/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnresolvedSymbolInspection.java b/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnresolvedSymbolInspection.java
index be08cee4..204e22a4 100644
--- a/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnresolvedSymbolInspection.java
+++ b/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnresolvedSymbolInspection.java
@@ -24,6 +24,7 @@ import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.openapi.util.TextRange;
import com.intellij.plugins.haxe.HaxeBundle;
import com.intellij.plugins.haxe.ide.annotator.HaxeAnnotatingVisitor;
+import com.intellij.plugins.haxe.lang.psi.HaxeFile;
import com.intellij.plugins.haxe.lang.psi.HaxeReferenceExpression;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
@@ -65,6 +66,7 @@ public class HaxeUnresolvedSymbolInspection extends LocalInspectionTool {
@Nullable
@Override
public ProblemDescriptor[] checkFile(@NotNull PsiFile file, @NotNull final InspectionManager manager, final boolean isOnTheFly) {
+ if (!(file instanceof HaxeFile)) return null;
final List<ProblemDescriptor> result = new ArrayList<ProblemDescriptor>();
new HaxeAnnotatingVisitor() {
@Override
diff --git a/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnusedImportInspection.java b/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnusedImportInspection.java
index 84644683..78def3d6 100644
--- a/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnusedImportInspection.java
+++ b/src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnusedImportInspection.java
@@ -26,6 +26,7 @@ import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.plugins.haxe.HaxeBundle;
import com.intellij.plugins.haxe.ide.HaxeImportOptimizer;
+import com.intellij.plugins.haxe.lang.psi.HaxeFile;
import com.intellij.plugins.haxe.lang.psi.HaxeImportStatement;
import com.intellij.plugins.haxe.util.HaxeImportUtil;
import com.intellij.psi.PsiElement;
@@ -68,8 +69,8 @@ public class HaxeUnusedImportInspection extends LocalInspectionTool {
@Nullable
@Override
public ProblemDescriptor[] checkFile(@NotNull PsiFile file, @NotNull InspectionManager manager, boolean isOnTheFly) {
+ if (!(file instanceof HaxeFile)) return null;
List<HaxeImportStatement> unusedImports = HaxeImportUtil.findUnusedImports(file);
-
if (unusedImports.isEmpty()) {
return ProblemDescriptor.EMPTY_ARRAY;
}
@@ -116,4 +117,6 @@ public class HaxeUnusedImportInspection extends LocalInspectionTool {
});
}
};
+
+
} | ['src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnresolvedSymbolInspection.java', 'src/common/com/intellij/plugins/haxe/ide/inspections/HaxePreprocessorInspection.java', 'src/common/com/intellij/plugins/haxe/ide/inspections/HaxeUnusedImportInspection.java', 'src/common/com/intellij/plugins/haxe/ide/inspections/HaxeDeprecatedInspection.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 2,935,913 | 692,417 | 88,076 | 641 | 420 | 97 | 12 | 4 | 5,493 | 141 | 1,217 | 58 | 0 | 1 | 1970-01-01T00:25:37 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
420 | haxefoundation/intellij-haxe/800/799 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/799 | https://github.com/HaxeFoundation/intellij-haxe/pull/800 | https://github.com/HaxeFoundation/intellij-haxe/pull/800 | 1 | closes | Performance issues with indexing and syntax inspections | Found performance issues that was introduced by this commit 0f734746972da141fe57846b303fed8e110aa89f
On project with multiple roots it drastically slowing indexing and syntax annotation performance.
Caching with invalidating listeners are required. | 2415fa9251d7978e87751bf71b780087b6752e77 | e406499202b3e615a85d97ffccfe41e168fbdcda | https://github.com/haxefoundation/intellij-haxe/compare/2415fa9251d7978e87751bf71b780087b6752e77...e406499202b3e615a85d97ffccfe41e168fbdcda | diff --git a/src/common/com/intellij/plugins/haxe/model/HaxePackageModel.java b/src/common/com/intellij/plugins/haxe/model/HaxePackageModel.java
index 6a1ff398..68af795d 100644
--- a/src/common/com/intellij/plugins/haxe/model/HaxePackageModel.java
+++ b/src/common/com/intellij/plugins/haxe/model/HaxePackageModel.java
@@ -39,11 +39,10 @@ public class HaxePackageModel implements HaxeExposableModel {
private final FullyQualifiedInfo qualifiedInfo;
- public HaxePackageModel(@NotNull HaxeProjectModel project,
- @NotNull HaxeSourceRootModel root,
+ public HaxePackageModel(@NotNull HaxeSourceRootModel root,
@NotNull String name,
@Nullable HaxePackageModel parent) {
- this.project = project;
+ this.project = root.project;
this.name = name;
this.root = root;
this.parent = parent;
@@ -102,7 +101,7 @@ public class HaxePackageModel implements HaxeExposableModel {
} else {
PsiDirectory directory = root.access(path.isEmpty() ? name : path + '.' + name);
if (directory != null) {
- return new HaxePackageModel(project, root, name, this);
+ return new HaxePackageModel(root, name, this);
}
}
@@ -114,7 +113,7 @@ public class HaxePackageModel implements HaxeExposableModel {
PsiDirectory directory = root.access(path);
if (directory != null) {
return Arrays.stream(directory.getSubdirectories())
- .map(subDirectory -> new HaxePackageModel(project, root, subDirectory.getName(), this))
+ .map(subDirectory -> new HaxePackageModel(root, subDirectory.getName(), this))
.collect(Collectors.toList());
}
return Collections.emptyList();
diff --git a/src/common/com/intellij/plugins/haxe/model/HaxeProjectModel.java b/src/common/com/intellij/plugins/haxe/model/HaxeProjectModel.java
index 6fc68485..1b11e96f 100644
--- a/src/common/com/intellij/plugins/haxe/model/HaxeProjectModel.java
+++ b/src/common/com/intellij/plugins/haxe/model/HaxeProjectModel.java
@@ -15,9 +15,13 @@
*/
package com.intellij.plugins.haxe.model;
+import com.intellij.ProjectTopics;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
+import com.intellij.openapi.roots.ModuleRootEvent;
+import com.intellij.openapi.roots.ModuleRootListener;
import com.intellij.openapi.roots.OrderEnumerator;
+import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiElement;
@@ -25,59 +29,37 @@ import com.intellij.psi.search.GlobalSearchScope;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.intellij.plugins.haxe.model.HaxeStdTypesFileModel.STD_TYPES_HX;
public class HaxeProjectModel {
+ private static final Key<HaxeProjectModel> HAXE_PROJECT_MODEL_KEY = new Key<>("HAXE_PROJECT_MODEL");
private final Project project;
- private final HaxePackageModel stdPackage;
- private final HaxeSourceRootModel sdkRoot;
+
+ private RootsCache rootsCache;
private HaxeProjectModel(Project project) {
this.project = project;
- this.sdkRoot = resolveSdkRoot();
- this.stdPackage = new HaxeStdPackageModel(this, getSdkRoot());
- }
-
- private HaxeSourceRootModel resolveSdkRoot() {
- if (ApplicationManager.getApplication().isUnitTestMode()) {
- final VirtualFile[] roots;
-
- roots = OrderEnumerator.orderEntries(project).getAllSourceRoots();
- if (roots.length > 0) {
- VirtualFile stdRootForTests = roots[0].findChild("std");
- if (stdRootForTests != null) {
- return new HaxeSourceRootModel(this, stdRootForTests);
- }
- }
- } else {
- VirtualFile root = detectProperSDKSourceRoot(OrderEnumerator.orderEntries(project).sdkOnly().getAllSourceRoots());
-
- if (root != null) {
- return new HaxeSourceRootModel(this, root);
- }
- }
- return HaxeSourceRootModel.DUMMY;
+ addProjectListeners();
}
- private VirtualFile detectProperSDKSourceRoot(VirtualFile[] roots) {
- for (VirtualFile root : roots) {
- if (root.findChild(STD_TYPES_HX) != null) return root;
- }
-
- return null;
- }
-
-
public static HaxeProjectModel fromElement(PsiElement element) {
return fromProject(element.getProject());
}
public static HaxeProjectModel fromProject(Project project) {
- return new HaxeProjectModel(project);
+ HaxeProjectModel model = project.getUserData(HAXE_PROJECT_MODEL_KEY);
+ if (model == null) {
+ model = new HaxeProjectModel(project);
+ project.putUserData(HAXE_PROJECT_MODEL_KEY, model);
+ }
+
+ return model;
}
public Project getProject() {
@@ -89,25 +71,17 @@ public class HaxeProjectModel {
}
public List<HaxeSourceRootModel> getRoots() {
- OrderEnumerator enumerator = OrderEnumerator.orderEntries(project).withoutSdk();
-
- return Stream.concat(
- Arrays.stream(enumerator.getClassesRoots()),
- Arrays.stream(enumerator.getSourceRoots())
- )
- .distinct()
- .map(root -> new HaxeSourceRootModel(this, root))
- .collect(Collectors.toList());
+ return getRootsCache().roots;
}
@NotNull
public HaxeSourceRootModel getSdkRoot() {
- return sdkRoot;
+ return getRootsCache().sdkRoot;
}
@NotNull
public HaxePackageModel getStdPackage() {
- return stdPackage;
+ return getRootsCache().stdPackageModel;
}
@Nullable
@@ -128,8 +102,8 @@ public class HaxeProjectModel {
if (resolvedValue != null) result.add(resolvedValue);
}
- if (result.isEmpty() && stdPackage != null) {
- resolvedValue = stdPackage.resolve(info);
+ if (result.isEmpty()) {
+ resolvedValue = getStdPackage().resolve(info);
if (resolvedValue != null) result.add(resolvedValue);
}
@@ -160,4 +134,69 @@ public class HaxeProjectModel {
}
return null;
}
+
+ private void addProjectListeners() {
+ project.getMessageBus().connect().subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() {
+ @Override
+ public void rootsChanged(ModuleRootEvent event) {
+ rootsCache = null;
+ }
+ });
+ }
+
+ private RootsCache getRootsCache() {
+ if (rootsCache == null) {
+ rootsCache = RootsCache.fromProjectModel(this);
+ }
+ return rootsCache;
+ }
+}
+
+class RootsCache {
+ final List<HaxeSourceRootModel> roots;
+ final HaxeSourceRootModel sdkRoot;
+ final HaxeStdPackageModel stdPackageModel;
+
+ private RootsCache(List<HaxeSourceRootModel> roots, HaxeSourceRootModel sdkRoot) {
+ this.roots = roots;
+ this.sdkRoot = sdkRoot;
+ this.stdPackageModel = new HaxeStdPackageModel(sdkRoot);
+ }
+
+ static RootsCache fromProjectModel(HaxeProjectModel model) {
+ return new RootsCache(getProjectRoots(model), getSdkRoot(model));
+ }
+
+ private static List<HaxeSourceRootModel> getProjectRoots(final HaxeProjectModel model) {
+ final OrderEnumerator enumerator = OrderEnumerator.orderEntries(model.getProject()).withoutSdk();
+
+ return Stream.concat(
+ Arrays.stream(enumerator.getClassesRoots()),
+ Arrays.stream(enumerator.getSourceRoots())
+ )
+ .distinct()
+ .map(root -> new HaxeSourceRootModel(model, root))
+ .collect(Collectors.toList());
+ }
+
+ private static HaxeSourceRootModel getSdkRoot(final HaxeProjectModel model) {
+ final VirtualFile[] roots;
+ if (ApplicationManager.getApplication().isUnitTestMode()) {
+ roots = OrderEnumerator.orderEntries(model.getProject()).getAllSourceRoots();
+ if (roots.length > 0) {
+ VirtualFile stdRootForTests = roots[0].findChild("std");
+ if (stdRootForTests != null) {
+ return new HaxeSourceRootModel(model, stdRootForTests);
+ }
+ }
+ } else {
+ roots = OrderEnumerator.orderEntries(model.getProject()).sdkOnly().getAllSourceRoots();
+ for (VirtualFile root : roots) {
+ if (root.findChild(STD_TYPES_HX) != null) {
+ return new HaxeSourceRootModel(model, root);
+ }
+ }
+ }
+ return HaxeSourceRootModel.DUMMY;
+ }
}
\\ No newline at end of file
diff --git a/src/common/com/intellij/plugins/haxe/model/HaxeSourceRootModel.java b/src/common/com/intellij/plugins/haxe/model/HaxeSourceRootModel.java
index 25d575e3..bc97de1a 100644
--- a/src/common/com/intellij/plugins/haxe/model/HaxeSourceRootModel.java
+++ b/src/common/com/intellij/plugins/haxe/model/HaxeSourceRootModel.java
@@ -37,7 +37,7 @@ public class HaxeSourceRootModel {
rootPackage = null;
}
else {
- rootPackage = new HaxePackageModel(project, this, "", null);
+ rootPackage = new HaxePackageModel(this, "", null);
}
}
diff --git a/src/common/com/intellij/plugins/haxe/model/HaxeStdPackageModel.java b/src/common/com/intellij/plugins/haxe/model/HaxeStdPackageModel.java
index f4237e89..88890a6d 100644
--- a/src/common/com/intellij/plugins/haxe/model/HaxeStdPackageModel.java
+++ b/src/common/com/intellij/plugins/haxe/model/HaxeStdPackageModel.java
@@ -23,9 +23,8 @@ public class HaxeStdPackageModel extends HaxePackageModel {
private static final String STD_TYPES = "StdTypes";
private final HaxeFileModel stdTypesModel;
- HaxeStdPackageModel(@NotNull HaxeProjectModel project,
- @NotNull HaxeSourceRootModel root) {
- super(project, root, "", null);
+ HaxeStdPackageModel(@NotNull HaxeSourceRootModel root) {
+ super(root, "", null);
this.stdTypesModel = this.getStdFileModel();
}
diff --git a/src/common/com/intellij/plugins/haxe/util/HaxeResolveUtil.java b/src/common/com/intellij/plugins/haxe/util/HaxeResolveUtil.java
index 2df26f25..2f6423e1 100644
--- a/src/common/com/intellij/plugins/haxe/util/HaxeResolveUtil.java
+++ b/src/common/com/intellij/plugins/haxe/util/HaxeResolveUtil.java
@@ -617,12 +617,12 @@ public class HaxeResolveUtil {
List<HaxeClassModel> models = file.getClassModels();
final Stream<HaxeClassModel> classesStream = models.stream().filter(model -> name.equals(model.getName()));
final Stream<HaxeEnumValueModel> enumsStream = models.stream().filter(model -> model instanceof HaxeEnumModel)
- .map(model -> ((HaxeEnumModel)model).getValue(name))
- .filter(Objects::nonNull);
+ .map(model -> ((HaxeEnumModel)model).getValue(name))
+ .filter(Objects::nonNull);
final HaxeModel result = Stream.concat(classesStream, enumsStream)
- .findFirst()
- .orElse(null);
+ .findFirst()
+ .orElse(null);
return result != null ? result.getBasePsi() : null;
}
@@ -658,6 +658,15 @@ public class HaxeResolveUtil {
}
public static String getQName(PsiElement[] fileChildren, final String result, boolean searchInSamePackage) {
+ final HaxeClass classForType = (HaxeClass)Arrays.stream(fileChildren)
+ .filter(child -> child instanceof HaxeClass && result.equals(((HaxeClass)child).getName()))
+ .findFirst()
+ .orElse(null);
+
+ if (classForType != null) {
+ return classForType.getQualifiedName();
+ }
+
final HaxeImportStatement importStatement =
(HaxeImportStatement)(StreamUtil.reverse(Arrays.stream(fileChildren))
.filter(element ->
@@ -667,27 +676,23 @@ public class HaxeResolveUtil {
.orElse(null));
final HaxeExpression importStatementExpression = importStatement == null ? null : importStatement.getReferenceExpression();
- final String packageName = getPackageName(
- (HaxePackageStatement)Arrays.stream(fileChildren)
- .filter(element -> element instanceof HaxePackageStatement)
- .findFirst()
- .orElse(null)
- );
+ if (importStatementExpression != null) {
+ return importStatementExpression.getText();
+ }
- final HaxeClass classForType = (HaxeClass)ContainerUtil.find(fileChildren, new Condition<PsiElement>() {
- @Override
- public boolean value(PsiElement element) {
- return element instanceof HaxeClass && result.equals(((HaxeClass)element).getName());
+ if (searchInSamePackage && fileChildren.length > 0) {
+ final HaxeFileModel fileModel = HaxeFileModel.fromElement(fileChildren[0]);
+ if (fileModel != null) {
+ final HaxePackageModel packageModel = fileModel.getPackageModel();
+ if (packageModel != null) {
+ final HaxeClassModel classModel = packageModel.getClassModel(result);
+ if (classModel != null) {
+ return classModel.haxeClass.getQualifiedName();
+ }
+ }
}
- });
-
- if (classForType != null) {
- return classForType.getQualifiedName();
- } else if (importStatement != null && importStatementExpression != null) {
- return importStatementExpression.getText();
- } else if (searchInSamePackage && !packageName.isEmpty()) {
- return packageName + "." + result;
}
+
return result;
}
| ['src/common/com/intellij/plugins/haxe/model/HaxePackageModel.java', 'src/common/com/intellij/plugins/haxe/model/HaxeStdPackageModel.java', 'src/common/com/intellij/plugins/haxe/util/HaxeResolveUtil.java', 'src/common/com/intellij/plugins/haxe/model/HaxeProjectModel.java', 'src/common/com/intellij/plugins/haxe/model/HaxeSourceRootModel.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 2,844,311 | 672,096 | 85,556 | 629 | 7,824 | 1,707 | 202 | 5 | 253 | 29 | 54 | 4 | 0 | 0 | 1970-01-01T00:25:23 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
425 | haxefoundation/intellij-haxe/193/168 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/168 | https://github.com/HaxeFoundation/intellij-haxe/pull/193 | https://github.com/HaxeFoundation/intellij-haxe/pull/193 | 1 | fixes | "update()" in FlxState are "unresolved symbols" | see https://groups.google.com/forum/#!msg/haxeflixel/gGmJ_B3x6tQ/Dxltcsvsi7sJ
we can reproduce the bug by open structure view for Bar.hx with the following code:
//file Foo.hx
```
class Foo { }
```
//file Col.hx
```
class Col<T> {
var iiii: Int;
}
```
//file Bar.hx
```
class Bar extends Col<Foo> {
var ssss: String;
}
```
Note that, if we put all class in one file then structure view will show Col as super class of Bar but if put each class in one file then structure view (for Bar.hx) will NOT show Col.
| 4efd5ae724c13c057790d2b06c2ce039876e4a8f | ac392849a72883aeeaea8774a5fa71a0a16368b1 | https://github.com/haxefoundation/intellij-haxe/compare/4efd5ae724c13c057790d2b06c2ce039876e4a8f...ac392849a72883aeeaea8774a5fa71a0a16368b1 | diff --git a/src/com/intellij/plugins/haxe/util/HaxeResolveUtil.java b/src/com/intellij/plugins/haxe/util/HaxeResolveUtil.java
index 7a4c9ada..872558c0 100644
--- a/src/com/intellij/plugins/haxe/util/HaxeResolveUtil.java
+++ b/src/com/intellij/plugins/haxe/util/HaxeResolveUtil.java
@@ -530,8 +530,8 @@ public class HaxeResolveUtil {
return null;
}
- String name = getQName(type, false);
- HaxeClass result = findClassByQName(name, type.getContext());
+ String name = getQName(type);
+ HaxeClass result = name == null? tryResolveClassByQNameWhenGetQNameFail(type) : findClassByQName(name, type.getContext());
result = result != null ? result : tryFindHelper(type);
result = result != null ? result : findClassByQNameInSuperPackages(type);
return result;
@@ -565,7 +565,8 @@ public class HaxeResolveUtil {
return ownerClass == null ? null : findComponentDeclaration(ownerClass.getContainingFile(), element.getText());
}
- public static String getQName(@NotNull PsiElement type, boolean searchInSamePackage) {
+ @Nullable
+ private static String getQName(@NotNull PsiElement type) {
HaxeImportStatementWithInSupport importStatementWithInSupport = PsiTreeUtil.getParentOfType(type,
HaxeImportStatementWithInSupport.class,
false);
@@ -575,24 +576,20 @@ public class HaxeResolveUtil {
HaxeUsingStatement usingStatement = PsiTreeUtil.getParentOfType(type, HaxeUsingStatement.class, false);
if (usingStatement != null) {
- return usingStatement.getReferenceExpression().getText();
+ HaxeReferenceExpression expression = usingStatement.getReferenceExpression();
+ return expression == null? null : expression.getText();
}
- String text = type.getText();
if (type instanceof HaxeReferenceExpression) {
- String text1 = type.getText();
- PsiElement element = null;
if (type.getParent() instanceof HaxeReferenceExpression) {
- element = type;
+ PsiElement element = type;
while (element.getParent() instanceof HaxeReferenceExpression) {
element = element.getParent();
}
- if (element != null) {
- HaxeClass haxeClass = findClassByQName(element.getText(), element.getContext());
- if (haxeClass != null) {
- return element.getText();
- }
+ HaxeClass haxeClass = findClassByQName(element.getText(), element.getContext());
+ if (haxeClass != null) {
+ return element.getText();
}
PsiElement parent = type.getParent();
@@ -603,16 +600,32 @@ public class HaxeResolveUtil {
}
}
+ return null;
+ }
+
+ @Nullable
+ private static HaxeClass tryResolveClassByQNameWhenGetQNameFail(@NotNull PsiElement type) {
if (type instanceof HaxeType) {
type = ((HaxeType)type).getReferenceExpression();
}
- final String result = type.getText();
- if (result.indexOf('.') == -1) {
+ final String name = type.getText();
+ HaxeClass result = null;
+
+ //1. try searchInSamePackage, ex if type is Bar, be referenced in foo.Foo then we will find class foo.Bar
+ //note if there are 2 class: Bar & foo.Bar then we need resolve foo.Bar instead of Bar.
+ if (name != null && name.indexOf('.') == -1) {
final PsiFile psiFile = type.getContainingFile();
final PsiElement[] fileChildren = psiFile.getChildren();
- return getQName(fileChildren, result, searchInSamePackage);
+ String nameWithPackage = getQName(fileChildren, name, true);
+ result = findClassByQName(nameWithPackage, type.getContext());
}
+
+ //2. try without searchInSamePackage,
+ // ex if type is Int, be referenced in foo.Foo then we will find class has qName = Int, not foo.Int
+ // (if foo.Int exist then the prev step have aready resolved & returned it)
+ result = result != null ? result : findClassByQName(name, type.getContext());
+
return result;
}
| ['src/com/intellij/plugins/haxe/util/HaxeResolveUtil.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 2,775,169 | 670,749 | 84,480 | 762 | 2,251 | 517 | 45 | 1 | 524 | 87 | 159 | 28 | 1 | 3 | 1970-01-01T00:23:47 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
421 | haxefoundation/intellij-haxe/767/764 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/764 | https://github.com/HaxeFoundation/intellij-haxe/pull/767 | https://github.com/HaxeFoundation/intellij-haxe/pull/767 | 1 | fixes | Wrong icon displayed for completions | The Haxe-logo version of the completions icon is being displayed for completion results originating from the index completion results. This should only show for completions provided by the compiler.
Version: master branch at 96338fbd0946e3a99fe9818af937c9b65bcdbafd | 96338fbd0946e3a99fe9818af937c9b65bcdbafd | 99d206e1166a4799edaf05f5ac517a5e1fe47cee | https://github.com/haxefoundation/intellij-haxe/compare/96338fbd0946e3a99fe9818af937c9b65bcdbafd...99d206e1166a4799edaf05f5ac517a5e1fe47cee | diff --git a/src/common/com/intellij/plugins/haxe/HaxeComponentType.java b/src/common/com/intellij/plugins/haxe/HaxeComponentType.java
index b7525c37..e7f79eaf 100644
--- a/src/common/com/intellij/plugins/haxe/HaxeComponentType.java
+++ b/src/common/com/intellij/plugins/haxe/HaxeComponentType.java
@@ -34,16 +34,28 @@ public enum HaxeComponentType {
public Icon getIcon() {
return icons.HaxeIcons.C_Haxe;
}
+ @Override
+ public Icon getCompletionIcon() {
+ return AllIcons.Nodes.Class;
+ }
}, ENUM(1) {
@Override
public Icon getIcon() {
return icons.HaxeIcons.E_Haxe;
}
+ @Override
+ public Icon getCompletionIcon() {
+ return AllIcons.Nodes.Enum;
+ }
}, INTERFACE(2) {
@Override
public Icon getIcon() {
return icons.HaxeIcons.I_Haxe;
}
+ @Override
+ public Icon getCompletionIcon() {
+ return AllIcons.Nodes.Interface;
+ }
}, FUNCTION(3) {
@Override
public Icon getIcon() {
@@ -93,6 +105,10 @@ public enum HaxeComponentType {
public abstract Icon getIcon();
+ public Icon getCompletionIcon() {
+ return getIcon();
+ }
+
public static boolean isVariable(@Nullable HaxeComponentType type) {
return type == VARIABLE || type == PARAMETER || type == FIELD;
}
diff --git a/src/common/com/intellij/plugins/haxe/ide/completion/HaxeClassNameCompletionContributor.java b/src/common/com/intellij/plugins/haxe/ide/completion/HaxeClassNameCompletionContributor.java
index ef468ddd..41824037 100644
--- a/src/common/com/intellij/plugins/haxe/ide/completion/HaxeClassNameCompletionContributor.java
+++ b/src/common/com/intellij/plugins/haxe/ide/completion/HaxeClassNameCompletionContributor.java
@@ -187,7 +187,7 @@ public class HaxeClassNameCompletionContributor extends CompletionContributor {
String name = pair.getFirst();
final String qName = HaxeResolveUtil.joinQName(info.getValue(), name);
myResultSet.addElement(LookupElementBuilder.create(qName, name)
- .withIcon(info.getIcon())
+ .withIcon(info.getCompletionIcon())
.withTailText(" " + info.getValue(), true)
.withInsertHandler(myInsertHandler));
}
diff --git a/src/common/com/intellij/plugins/haxe/ide/index/HaxeClassInfo.java b/src/common/com/intellij/plugins/haxe/ide/index/HaxeClassInfo.java
index c360b88e..9c6f867f 100644
--- a/src/common/com/intellij/plugins/haxe/ide/index/HaxeClassInfo.java
+++ b/src/common/com/intellij/plugins/haxe/ide/index/HaxeClassInfo.java
@@ -50,6 +50,11 @@ public class HaxeClassInfo {
return type == null ? null : type.getIcon();
}
+ @Nullable
+ public Icon getCompletionIcon() {
+ return type == null ? null : type.getCompletionIcon();
+ }
+
private int getTypeKey() {
return type != null ? type.getKey() : -1;
} | ['src/common/com/intellij/plugins/haxe/HaxeComponentType.java', 'src/common/com/intellij/plugins/haxe/ide/index/HaxeClassInfo.java', 'src/common/com/intellij/plugins/haxe/ide/completion/HaxeClassNameCompletionContributor.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 2,822,724 | 667,335 | 84,959 | 627 | 605 | 122 | 23 | 3 | 269 | 34 | 64 | 3 | 0 | 0 | 1970-01-01T00:25:20 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
422 | haxefoundation/intellij-haxe/742/741 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/741 | https://github.com/HaxeFoundation/intellij-haxe/pull/742 | https://github.com/HaxeFoundation/intellij-haxe/pull/742 | 1 | closes | Wrong resolution order. Import alias should be resolved before package name. | ```haxe
package sample;
class Foo {}
```
```haxe
import something.Bar.instance as sample;
public static function main() {
sample.doThings();
}
```
`sample` should be resolved as `something.Bar.instance` not as package `sample`. | d2a60bf6eac3c7e5726414b8c665cdec93a6da86 | 436696fbe9cb214d1c1b75aa6f15f0d05bab1766 | https://github.com/haxefoundation/intellij-haxe/compare/d2a60bf6eac3c7e5726414b8c665cdec93a6da86...436696fbe9cb214d1c1b75aa6f15f0d05bab1766 | diff --git a/src/common/com/intellij/plugins/haxe/lang/psi/HaxeResolver.java b/src/common/com/intellij/plugins/haxe/lang/psi/HaxeResolver.java
index 6b6d5388..604d2395 100644
--- a/src/common/com/intellij/plugins/haxe/lang/psi/HaxeResolver.java
+++ b/src/common/com/intellij/plugins/haxe/lang/psi/HaxeResolver.java
@@ -75,6 +75,21 @@ public class HaxeResolver implements ResolveCache.AbstractResolver<HaxeReference
return superElements;
}
+ HaxeFileModel fileModel = HaxeFileModel.fromElement(reference);
+ if (fileModel != null) {
+ String className = reference.getText();
+
+ PsiElement target = HaxeResolveUtil.searchInSameFile(fileModel, className);
+ if (target == null) target = HaxeResolveUtil.searchInImports(fileModel, className);
+ if (target == null) target = HaxeResolveUtil.searchInSamePackage(fileModel, className);
+
+ if (target != null) {
+ return asList(target);
+ }
+ }
+
+ LogResolution(reference, "failed after exhausting all options.");
+
if (PsiNameHelper.getInstance(reference.getProject()).isQualifiedName(reference.getText())) {
List<HaxeModel> resolvedPackage =
HaxeProjectModel.fromElement(reference).resolve(new FullyQualifiedInfo(reference.getText()), reference.getResolveScope());
@@ -83,19 +98,6 @@ public class HaxeResolver implements ResolveCache.AbstractResolver<HaxeReference
return Collections.singletonList(resolvedPackage.get(0).getBasePsi());
}
}
-
- HaxeFileModel fileModel = HaxeFileModel.fromElement(reference);
- String className = reference.getText();
-
- PsiElement target = HaxeResolveUtil.searchInSameFile(fileModel, className);
- if (target == null) target = HaxeResolveUtil.searchInImports(fileModel, className);
- if (target == null) target = HaxeResolveUtil.searchInSamePackage(fileModel, className);
-
- if (target != null) {
- return asList(target);
- }
-
- LogResolution(reference, "failed after exhausting all options.");
}
return result == null ? ContainerUtil.emptyList() : result; | ['src/common/com/intellij/plugins/haxe/lang/psi/HaxeResolver.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 2,768,161 | 655,321 | 83,343 | 614 | 1,130 | 236 | 28 | 1 | 245 | 30 | 56 | 13 | 0 | 2 | 1970-01-01T00:25:17 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
423 | haxefoundation/intellij-haxe/330/323 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/323 | https://github.com/HaxeFoundation/intellij-haxe/pull/330 | https://github.com/HaxeFoundation/intellij-haxe/pull/330 | 2 | fix | Modifications to the /gen/ tree have to be reverted and non-generated functions must be integrated differently. | Changes 1f7684ee931708f59567fbb48f1fc421e848986d, 1f7684ee931708f59567fbb48f1fc421e848986d, and 1f7684ee931708f59567fbb48f1fc421e848986d introduced manual changes to the generated parser files. These need to be reverted and the new code added, either to a mixin super-class or using another mechanism.
Primarily the functions that need to be removed are the getModel() methods added for semantic annotation.
TIR: STB-11374
| a077c3420c2cb9fffe202e7af82661bf73947cbc | 44ec346e94c1937c3ed55bcf45d16ace676cad10 | https://github.com/haxefoundation/intellij-haxe/compare/a077c3420c2cb9fffe202e7af82661bf73947cbc...44ec346e94c1937c3ed55bcf45d16ace676cad10 | diff --git a/gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclaration.java b/gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclaration.java
index bab8f5dd..67e43298 100644
--- a/gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclaration.java
+++ b/gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclaration.java
@@ -20,14 +20,10 @@
package com.intellij.plugins.haxe.lang.psi;
import java.util.List;
-
-import com.intellij.plugins.haxe.model.HaxeFieldModel;
-import com.intellij.plugins.haxe.model.HaxeMethodModel;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;
public interface HaxeVarDeclaration extends HaxePsiField {
- HaxeFieldModel getModel();
@NotNull
List<HaxeAutoBuildMacro> getAutoBuildMacroList();
diff --git a/gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclarationPart.java b/gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclarationPart.java
index f4efcb61..813fb1a1 100644
--- a/gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclarationPart.java
+++ b/gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclarationPart.java
@@ -20,12 +20,11 @@
package com.intellij.plugins.haxe.lang.psi;
import java.util.List;
-
-import com.intellij.plugins.haxe.model.HaxeFieldModel;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;
public interface HaxeVarDeclarationPart extends HaxePsiField {
+
@NotNull
HaxeComponentName getComponentName();
diff --git a/gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeExpressionListImpl.java b/gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeExpressionListImpl.java
index bed75f2b..1fb8676f 100644
--- a/gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeExpressionListImpl.java
+++ b/gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeExpressionListImpl.java
@@ -19,14 +19,15 @@
// This is a generated file. Not intended for manual editing.
package com.intellij.plugins.haxe.lang.psi.impl;
+import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.lang.ASTNode;
+import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.util.PsiTreeUtil;
+import static com.intellij.plugins.haxe.lang.lexer.HaxeTokenTypes.*;
import com.intellij.plugins.haxe.lang.psi.*;
-import java.util.List;
-
public class HaxeExpressionListImpl extends HaxePsiCompositeElementImpl implements HaxeExpressionList {
public HaxeExpressionListImpl(ASTNode node) {
diff --git a/gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeVarDeclarationImpl.java b/gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeVarDeclarationImpl.java
index 248cb2cb..5428c326 100644
--- a/gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeVarDeclarationImpl.java
+++ b/gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeVarDeclarationImpl.java
@@ -20,9 +20,6 @@
package com.intellij.plugins.haxe.lang.psi.impl;
import java.util.List;
-
-import com.intellij.plugins.haxe.model.HaxeFieldModel;
-import com.intellij.plugins.haxe.model.HaxeMethodModel;
import org.jetbrains.annotations.*;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
@@ -42,11 +39,6 @@ public class HaxeVarDeclarationImpl extends HaxePsiFieldImpl implements HaxeVarD
else super.accept(visitor);
}
- @Override
- public HaxeFieldModel getModel() {
- return new HaxeFieldModel(this);
- }
-
@Override
@NotNull
public List<HaxeAutoBuildMacro> getAutoBuildMacroList() {
diff --git a/src/common/com/intellij/plugins/haxe/ide/annotator/HaxeSemanticAnnotator.java b/src/common/com/intellij/plugins/haxe/ide/annotator/HaxeSemanticAnnotator.java
index 6a9764fa..38de3921 100644
--- a/src/common/com/intellij/plugins/haxe/ide/annotator/HaxeSemanticAnnotator.java
+++ b/src/common/com/intellij/plugins/haxe/ide/annotator/HaxeSemanticAnnotator.java
@@ -104,7 +104,7 @@ class TypeTagChecker {
class FieldChecker {
public static void check(final HaxeVarDeclaration var, final AnnotationHolder holder) {
- HaxeFieldModel field = var.getModel();
+ HaxeFieldModel field = new HaxeFieldModel(var);
if (field.isProperty()) {
checkProperty(field, holder);
}
diff --git a/src/common/com/intellij/plugins/haxe/model/HaxeClassModel.java b/src/common/com/intellij/plugins/haxe/model/HaxeClassModel.java
index b410131e..d0de5d21 100644
--- a/src/common/com/intellij/plugins/haxe/model/HaxeClassModel.java
+++ b/src/common/com/intellij/plugins/haxe/model/HaxeClassModel.java
@@ -206,7 +206,7 @@ public class HaxeClassModel {
public HaxeFieldModel getField(String name) {
HaxeVarDeclaration name1 = (HaxeVarDeclaration)haxeClass.findHaxeFieldByName(name);
- return name1 != null ? name1.getModel() : null;
+ return name1 != null ? new HaxeFieldModel(name1) : null;
}
public HaxeMethodModel getMethod(String name) {
diff --git a/src/common/com/intellij/plugins/haxe/model/HaxeMemberModel.java b/src/common/com/intellij/plugins/haxe/model/HaxeMemberModel.java
index b737f157..59d4b39a 100644
--- a/src/common/com/intellij/plugins/haxe/model/HaxeMemberModel.java
+++ b/src/common/com/intellij/plugins/haxe/model/HaxeMemberModel.java
@@ -90,7 +90,7 @@ abstract public class HaxeMemberModel {
public static HaxeMemberModel fromPsi(PsiElement element) {
if (element instanceof HaxeMethod) return ((HaxeMethod)element).getModel();
- if (element instanceof HaxeVarDeclaration) return ((HaxeVarDeclaration)element).getModel();
+ if (element instanceof HaxeVarDeclaration) return new HaxeFieldModel((HaxeVarDeclaration)element);
final PsiElement parent = element.getParent();
return (parent != null) ? fromPsi(parent) : null;
} | ['src/common/com/intellij/plugins/haxe/model/HaxeMemberModel.java', 'gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclaration.java', 'gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeVarDeclarationImpl.java', 'src/common/com/intellij/plugins/haxe/ide/annotator/HaxeSemanticAnnotator.java', 'src/common/com/intellij/plugins/haxe/model/HaxeClassModel.java', 'gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeExpressionListImpl.java', 'gen/com/intellij/plugins/haxe/lang/psi/HaxeVarDeclarationPart.java'] | {'.java': 7} | 7 | 7 | 0 | 0 | 7 | 3,147,113 | 749,488 | 95,117 | 833 | 979 | 228 | 26 | 7 | 426 | 50 | 130 | 6 | 0 | 0 | 1970-01-01T00:23:56 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
424 | haxefoundation/intellij-haxe/274/136 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/136 | https://github.com/HaxeFoundation/intellij-haxe/pull/274 | https://github.com/HaxeFoundation/intellij-haxe/pull/274 | 1 | fix | [FIND USAGE] incorrect reference verification for COMPONENT_NAME elements | suppose following code:
```
var foo = new Sprite();
trace(foo);
trace(foo);
```
If you place cursor on second or third line and try to find usages, both usages will be found. But if you'll try to do the same for the first line (in the variable declaration) - no usages will be found.
The problem here is that "var foo" is a reference to PsiElement with token type "COMPONENT_NAME". t
The foo in "trace(foo);", are references to PsiElement with token type "LOCAL_VAR_DECLARATION_PART", which is parent of element type referenced by "var foo".
In this way elements do not match, and the usages is not found (also refactor->rename doesn't work). I really think that this is a very major bug.
I've fixed problem in HaxeReferenceImpl::isReferenceTo, by changing the last row
```
return !chain && resolve == element;
```
to
```
if (element instanceof HaxeComponentName) {
return !chain && resolve == element.getParent();
}else {
return !chain && resolve == element;
}
```
TIR: STB-9359
but I'm not sure if it's a good way to fix this issue.
| e5b54e219a72b54a39eb188626cfdda6c8d00c8f | e62cc64d75e92b3eaf4126fc849f73a7087242b5 | https://github.com/haxefoundation/intellij-haxe/compare/e5b54e219a72b54a39eb188626cfdda6c8d00c8f...e62cc64d75e92b3eaf4126fc849f73a7087242b5 | diff --git a/src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiFieldImpl.java b/src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiFieldImpl.java
index 5000e2ec..dbb10b41 100644
--- a/src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiFieldImpl.java
+++ b/src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiFieldImpl.java
@@ -138,7 +138,7 @@ public abstract class HaxePsiFieldImpl extends AbstractHaxeNamedComponent implem
final HaxeType type = (toa != null) ? toa.getType() : null;
psiType = (type != null) ? type.getPsiType() : null;
}
- return psiType;
+ return psiType != null ? psiType : HaxePsiTypeAdapter.DYNAMIC;
}
@Nullable
diff --git a/src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiTypeAdapter.java b/src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiTypeAdapter.java
index 85c096d7..da0471f1 100644
--- a/src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiTypeAdapter.java
+++ b/src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiTypeAdapter.java
@@ -54,11 +54,13 @@ import javax.swing.*;
*/
public class HaxePsiTypeAdapter extends PsiType implements HaxeType {
- Logger LOG = Logger.getInstance("#com.intellij.plugins.haxe.lang.psi.impl.HaxePsiTypeAdapter");
- {
+ static final Logger LOG = Logger.getInstance("#com.intellij.plugins.haxe.lang.psi.impl.HaxePsiTypeAdapter");
+ static {
LOG.setLevel(Level.DEBUG);
}
+ public static final PsiPrimitiveType DYNAMIC = new PsiPrimitiveType("Dynamic", PsiAnnotation.EMPTY_ARRAY);
+
HaxeType myType = null;
public HaxePsiTypeAdapter(@NotNull HaxeType haxeType) { | ['src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiTypeAdapter.java', 'src/common/com/intellij/plugins/haxe/lang/psi/impl/HaxePsiFieldImpl.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 2,933,919 | 701,692 | 88,860 | 781 | 427 | 94 | 8 | 2 | 1,068 | 175 | 266 | 35 | 0 | 3 | 1970-01-01T00:23:52 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
415 | haxefoundation/intellij-haxe/326/325 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/325 | https://github.com/HaxeFoundation/intellij-haxe/pull/326 | https://github.com/HaxeFoundation/intellij-haxe/issues/325#issuecomment-117947839 | 1 | fixes | Stack frame does not include fields from "this" when debugging. | When debugging, there is a race condition (particularly with IDEA 14, which introduces "auto-expanding") where the variable lists are calculated and drawn before the data is available from the debugger. This occurs more frequently with large projects. The "this" pointer is particularly vulnerable to the error.
IDEA 14, 14.1
Plugin versions 0.9.3, 0.9.4..
JDK 1.6
OS: Linux
TIR: STB-10938
| 819e1ab8df3622206cc088a85a457f2a1588be90 | 328a28a930c53122667dbc6d92d5a71e7bb2214c | https://github.com/haxefoundation/intellij-haxe/compare/819e1ab8df3622206cc088a85a457f2a1588be90...328a28a930c53122667dbc6d92d5a71e7bb2214c | diff --git a/src/13.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java b/src/13.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
index 39b05df3..b496b696 100644
--- a/src/13.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
+++ b/src/13.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
@@ -63,6 +63,7 @@ import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.evaluation.XDebuggerEvaluator;
import com.intellij.xdebugger.frame.*;
import com.intellij.xdebugger.impl.XSourcePositionImpl;
+import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodeImpl;
import haxe.root.JavaProtocol;
import org.jetbrains.annotations.NotNull;
@@ -1113,8 +1114,17 @@ public class HaxeDebugRunner extends DefaultProgramRunner
mIcon = AllIcons.General.Error;
mValue = mType = "<Unavailable>";
}
-
- Value.this.computePresentation(node, place);
+
+ // If fromStructuredValue contained a list, we need to add all items to the node.
+ if (null != mChildren) {
+ XValueChildrenList childrenList = new XValueChildrenList();
+ for (Value v : mChildren) {
+ childrenList.add(v.mName, v);
+ }
+ ((XValueNodeImpl)node).addChildren(childrenList, false);
+ }
+
+ Value.this.computePresentation(node, place);
}
});
}
diff --git a/src/14.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java b/src/14.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
index 1c4dabbd..5d0075bf 100644
--- a/src/14.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
+++ b/src/14.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
@@ -63,6 +63,7 @@ import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.evaluation.XDebuggerEvaluator;
import com.intellij.xdebugger.frame.*;
import com.intellij.xdebugger.impl.XSourcePositionImpl;
+import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodeImpl;
import haxe.root.JavaProtocol;
import org.jetbrains.annotations.NotNull;
@@ -1014,6 +1015,15 @@ public class HaxeDebugRunner extends DefaultProgramRunner {
mValue = mType = "<Unavailable>";
}
+ // If fromStructuredValue contained a list, we need to add all items to the node.
+ if (null != mChildren) {
+ XValueChildrenList childrenList = new XValueChildrenList();
+ for (Value v : mChildren) {
+ childrenList.add(v.mName, v);
+ }
+ ((XValueNodeImpl)node).addChildren(childrenList, false);
+ }
+
Value.this.computePresentation(node, place);
}
});
diff --git a/src/14/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java b/src/14/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
index f687a877..418817dc 100644
--- a/src/14/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
+++ b/src/14/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java
@@ -63,6 +63,7 @@ import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.evaluation.XDebuggerEvaluator;
import com.intellij.xdebugger.frame.*;
import com.intellij.xdebugger.impl.XSourcePositionImpl;
+import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodeImpl;
import haxe.root.JavaProtocol;
import org.jetbrains.annotations.NotNull;
@@ -1014,6 +1015,15 @@ public class HaxeDebugRunner extends DefaultProgramRunner {
mValue = mType = "<Unavailable>";
}
+ // If fromStructuredValue contained a list, we need to add all items to the node.
+ if (null != mChildren) {
+ XValueChildrenList childrenList = new XValueChildrenList();
+ for (Value v : mChildren) {
+ childrenList.add(v.mName, v);
+ }
+ ((XValueNodeImpl)node).addChildren(childrenList, false);
+ }
+
Value.this.computePresentation(node, place);
}
}); | ['src/14.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java', 'src/14/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java', 'src/13.1/com/intellij/plugins/haxe/runner/debugger/HaxeDebugRunner.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 3,142,283 | 748,530 | 95,018 | 833 | 1,832 | 314 | 34 | 3 | 394 | 59 | 107 | 9 | 0 | 0 | 1970-01-01T00:23:55 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
427 | haxefoundation/intellij-haxe/133/132 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/132 | https://github.com/HaxeFoundation/intellij-haxe/pull/133 | https://github.com/HaxeFoundation/intellij-haxe/pull/133 | 2 | fixed | Imports: incorrect processing for duplicated imports | suppose code:
```
import somepack.classA;
import somepack.classA;
import somepack.classB;
```
Expected result:
Only second import statement is marked as unused.
Actual result:
Both second and third import statements are marked as unused.
| 96f14b77ae87d0df232c35c7c225ceeb29f8b4c9 | f0cd4561d888c1558bb0f4b4c0c8ebf7cbaf6210 | https://github.com/haxefoundation/intellij-haxe/compare/96f14b77ae87d0df232c35c7c225ceeb29f8b4c9...f0cd4561d888c1558bb0f4b4c0c8ebf7cbaf6210 | diff --git a/src/com/intellij/plugins/haxe/util/HaxeImportUtil.java b/src/com/intellij/plugins/haxe/util/HaxeImportUtil.java
index 6a23390a..44646577 100644
--- a/src/com/intellij/plugins/haxe/util/HaxeImportUtil.java
+++ b/src/com/intellij/plugins/haxe/util/HaxeImportUtil.java
@@ -59,9 +59,10 @@ public class HaxeImportUtil {
}
});
- boolean alreadyAdded = false;
for (int i = 0; i < usefulImportStatements.size(); i++) {
+ boolean alreadyAdded = false;
+
for (int j = 0; j < filteredUsefulImports.size(); j++) {
if (usefulImportStatements.get(i).getReferenceExpression().getText().equals(
filteredUsefulImports.get(j).getReferenceExpression().getText())) {
@@ -151,9 +152,10 @@ public class HaxeImportUtil {
}
});
- boolean alreadyAdded = false;
for (int i = 0; i < usefulImportStatementWithInSupports.size(); i++) {
+ boolean alreadyAdded = false;
+
for (int j = 0; j < filteredUsefulImports.size(); j++) {
if (usefulImportStatementWithInSupports.get(i).getReferenceExpression().getText().equals(
filteredUsefulImports.get(j).getReferenceExpression().getText())
@@ -249,9 +251,11 @@ public class HaxeImportUtil {
}
});
- boolean alreadyAdded = false;
for (int i = 0; i < usefulImportStatementWithInSupports.size(); i++) {
+
+ boolean alreadyAdded = false;
+
for (int j = 0; j < filteredUsefulImports.size(); j++) {
if (usefulImportStatementWithInSupports.get(i).getReferenceExpression().getText().equals(
filteredUsefulImports.get(j).getReferenceExpression().getText())) { | ['src/com/intellij/plugins/haxe/util/HaxeImportUtil.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 2,158,133 | 522,162 | 65,812 | 627 | 223 | 42 | 10 | 1 | 243 | 32 | 52 | 16 | 0 | 1 | 1970-01-01T00:23:45 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
426 | haxefoundation/intellij-haxe/135/134 | haxefoundation | intellij-haxe | https://github.com/HaxeFoundation/intellij-haxe/issues/134 | https://github.com/HaxeFoundation/intellij-haxe/pull/135 | https://github.com/HaxeFoundation/intellij-haxe/pull/135 | 2 | fixed | [CODE REFORMAT] incorrect reformat for object and array children | suppose you have following code:
```
var arr = [{a:1},{a:2},{a:3}];
```
You could separate it to several lines of code. In this case you expect code like this:
```
var arr = [
{
a:1
},
{
a:2
},
{
a:3
}
];
```
actually after reformat the result is like this:
```
var arr = [
{
a:1
},
{
a:2
},
{
a:3
}
];
```
| 79ff9250d688b2f5bbdf7fe5fdd60e07b44db893 | 2482acaa6b2bb814a36429278b812afd376bbb84 | https://github.com/haxefoundation/intellij-haxe/compare/79ff9250d688b2f5bbdf7fe5fdd60e07b44db893...2482acaa6b2bb814a36429278b812afd376bbb84 | diff --git a/src/com/intellij/plugins/haxe/ide/formatter/HaxeIndentProcessor.java b/src/com/intellij/plugins/haxe/ide/formatter/HaxeIndentProcessor.java
index f46c63f0..2ef00d91 100644
--- a/src/com/intellij/plugins/haxe/ide/formatter/HaxeIndentProcessor.java
+++ b/src/com/intellij/plugins/haxe/ide/formatter/HaxeIndentProcessor.java
@@ -79,7 +79,7 @@ public class HaxeIndentProcessor {
}
return Indent.getNormalIndent();
}
- if (needIndent(parentType)) {
+ if (needIndent(parentType, elementType)) {
final PsiElement psi = node.getPsi();
if (psi.getParent() instanceof PsiFile) {
return Indent.getNoneIndent();
@@ -114,12 +114,15 @@ public class HaxeIndentProcessor {
return Indent.getNoneIndent();
}
- private static boolean needIndent(@Nullable IElementType type) {
+ private static boolean needIndent(@Nullable IElementType type, IElementType elementType) {
if (type == null) {
return false;
}
boolean result = type == BLOCK_STATEMENT;
result = result || type == CLASS_BODY;
+ result = result || (type == ARRAY_LITERAL && elementType != PLBRACK && elementType != PRBRACK);
+ result = result || type == OBJECT_LITERAL;
+ result = result || type == CLASS_BODY;
result = result || type == EXTERN_CLASS_DECLARATION_BODY;
result = result || type == ENUM_BODY;
result = result || type == INTERFACE_BODY; | ['src/com/intellij/plugins/haxe/ide/formatter/HaxeIndentProcessor.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 2,160,775 | 522,676 | 65,900 | 627 | 437 | 93 | 7 | 1 | 360 | 68 | 119 | 38 | 0 | 3 | 1970-01-01T00:23:45 | 203 | Java | {'Java': 3257966, 'Haxe': 447777, 'Lex': 31640, 'Kotlin': 13197, 'HTML': 5214, 'Makefile': 458, 'Shell': 413, 'Batchfile': 200} | Apache License 2.0 |
1,316 | smallrye/smallrye-reactive-messaging/863/862 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/862 | https://github.com/smallrye/smallrye-reactive-messaging/pull/863 | https://github.com/smallrye/smallrye-reactive-messaging/pull/863 | 1 | fix | Unable to use Kafka interceptors while using DLQ | If we configure a channel with a Kafka interceptor and with a DLQ, the DLQ producer fails to create with ` java.lang.ClassCastException: class io.opentracing.contrib.kafka.TracingConsumerInterceptor`.
```
2020-11-09 14:45:48,030 ERROR [io.sma.rea.mes.provider] (Quarkus Main Thread) SRMSG00230: Unable to create the publisher or subscriber during initialization: org.apache.kafka.common.KafkaException: Failed to construct kafka producer
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:434)
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:270)
at io.vertx.kafka.client.producer.impl.KafkaWriteStreamImpl.create(KafkaWriteStreamImpl.java:52)
at io.vertx.kafka.client.producer.KafkaWriteStream.create(KafkaWriteStream.java:92)
at io.vertx.kafka.client.producer.KafkaProducer.create(KafkaProducer.java:149)
at io.vertx.mutiny.kafka.client.producer.KafkaProducer.create(KafkaProducer.java:142)
at io.smallrye.reactive.messaging.kafka.fault.KafkaDeadLetterQueue.create(KafkaDeadLetterQueue.java:52)
at io.smallrye.reactive.messaging.kafka.impl.KafkaSource.createFailureHandler(KafkaSource.java:358)
at io.smallrye.reactive.messaging.kafka.impl.KafkaSource.<init>(KafkaSource.java:217)
at io.smallrye.reactive.messaging.kafka.KafkaConnector.getPublisherBuilder(KafkaConnector.java:150)
at io.smallrye.reactive.messaging.kafka.KafkaConnector_ClientProxy.getPublisherBuilder(KafkaConnector_ClientProxy.zig:305)
at io.smallrye.reactive.messaging.impl.ConfiguredChannelFactory.createPublisherBuilder(ConfiguredChannelFactory.java:188)
at io.smallrye.reactive.messaging.impl.ConfiguredChannelFactory.register(ConfiguredChannelFactory.java:153)
at io.smallrye.reactive.messaging.impl.ConfiguredChannelFactory.initialize(ConfiguredChannelFactory.java:125)
at io.smallrye.reactive.messaging.impl.ConfiguredChannelFactory_ClientProxy.initialize(ConfiguredChannelFactory_ClientProxy.zig:217)
at java.base/java.util.Iterator.forEachRemaining(Iterator.java:133)
at java.base/java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
at io.smallrye.reactive.messaging.extension.MediatorManager.initializeAndRun(MediatorManager.java:156)
at io.smallrye.reactive.messaging.extension.MediatorManager_ClientProxy.initializeAndRun(MediatorManager_ClientProxy.zig:277)
at io.quarkus.smallrye.reactivemessaging.runtime.SmallRyeReactiveMessagingLifecycle.onApplicationStart(SmallRyeReactiveMessagingLifecycle.java:20)
at io.quarkus.smallrye.reactivemessaging.runtime.SmallRyeReactiveMessagingLifecycle_Observer_onApplicationStart_4e8937813d9e8faff65c3c07f88fa96615b70e70.notify(SmallRyeReactiveMessagingLifecycle_Observer_onApplicationStart_4e8937813d9e8faff65c3c07f88fa96615b70e70.zig:111)
at io.quarkus.arc.impl.EventImpl$Notifier.notifyObservers(EventImpl.java:282)
at io.quarkus.arc.impl.EventImpl$Notifier.notify(EventImpl.java:267)
at io.quarkus.arc.impl.EventImpl.fire(EventImpl.java:69)
at io.quarkus.arc.runtime.LifecycleEventRunner.fireStartupEvent(LifecycleEventRunner.java:23)
at io.quarkus.arc.runtime.ArcRecorder.handleLifecycleEvents(ArcRecorder.java:60)
at io.quarkus.deployment.steps.LifecycleEventsBuildStep$startupEvent-858218658.deploy_0(LifecycleEventsBuildStep$startupEvent-858218658.zig:81)
at io.quarkus.deployment.steps.LifecycleEventsBuildStep$startupEvent-858218658.deploy(LifecycleEventsBuildStep$startupEvent-858218658.zig:40)
at io.quarkus.runner.ApplicationImpl.doStart(ApplicationImpl.zig:488)
at io.quarkus.runtime.Application.start(Application.java:90)
at io.quarkus.runtime.ApplicationLifecycleManager.run(ApplicationLifecycleManager.java:95)
at io.quarkus.runtime.Quarkus.run(Quarkus.java:62)
at io.quarkus.runtime.Quarkus.run(Quarkus.java:38)
at io.quarkus.runtime.Quarkus.run(Quarkus.java:104)
at io.quarkus.runner.GeneratedMain.main(GeneratedMain.zig:29)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:64)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:564)
at io.quarkus.runner.bootstrap.StartupActionImpl$3.run(StartupActionImpl.java:134)
at java.base/java.lang.Thread.run(Thread.java:832)
Caused by: java.lang.ClassCastException: class io.opentracing.contrib.kafka.TracingConsumerInterceptor
at java.base/java.lang.Class.asSubclass(Class.java:3851)
at org.apache.kafka.common.utils.Utils.loadClass(Utils.java:335)
at org.apache.kafka.common.utils.Utils.newInstance(Utils.java:324)
at org.apache.kafka.common.config.AbstractConfig.getConfiguredInstance(AbstractConfig.java:365)
at org.apache.kafka.common.config.AbstractConfig.getConfiguredInstances(AbstractConfig.java:436)
at org.apache.kafka.common.config.AbstractConfig.getConfiguredInstances(AbstractConfig.java:417)
at org.apache.kafka.common.config.AbstractConfig.getConfiguredInstances(AbstractConfig.java:404)
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:378)
... 41 more
```
Reproducer:
[kafka-interceptor-issue.zip](https://github.com/smallrye/smallrye-reactive-messaging/files/5510879/kafka-interceptor-issue.zip)
| ff3a0472685bbd713c8c95e3af0df11741bc0f35 | 4bc6f51cc6c26115cc2ae3ac6e83f56fb7033cc3 | https://github.com/smallrye/smallrye-reactive-messaging/compare/ff3a0472685bbd713c8c95e3af0df11741bc0f35...4bc6f51cc6c26115cc2ae3ac6e83f56fb7033cc3 | diff --git a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
index e69dc3e43..047fa1d00 100644
--- a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
+++ b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
@@ -2,8 +2,7 @@ package io.smallrye.reactive.messaging.kafka.fault;
import static io.smallrye.reactive.messaging.kafka.i18n.KafkaLogging.log;
import static org.apache.kafka.clients.CommonClientConfigs.CLIENT_ID_CONFIG;
-import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG;
-import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.*;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
@@ -49,6 +48,10 @@ public class KafkaDeadLetterQueue implements KafkaFailureHandler {
Map<String, String> deadQueueProducerConfig = new HashMap<>(kafkaConfiguration);
String keyDeserializer = deadQueueProducerConfig.remove(KEY_DESERIALIZER_CLASS_CONFIG);
String valueDeserializer = deadQueueProducerConfig.remove(VALUE_DESERIALIZER_CLASS_CONFIG);
+
+ // We need to remove consumer interceptor
+ deadQueueProducerConfig.remove(INTERCEPTOR_CLASSES_CONFIG);
+
deadQueueProducerConfig.put(KEY_SERIALIZER_CLASS_CONFIG,
conf.getDeadLetterQueueKeySerializer().orElse(getMirrorSerializer(keyDeserializer)));
deadQueueProducerConfig.put(VALUE_SERIALIZER_CLASS_CONFIG,
diff --git a/smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java b/smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java
index 4fb8b4cc2..9b49c4a49 100644
--- a/smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java
+++ b/smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java
@@ -4,10 +4,7 @@ import static io.smallrye.reactive.messaging.kafka.fault.KafkaDeadLetterQueue.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.UUID;
+import java.util.*;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
@@ -18,11 +15,10 @@ import java.util.function.Function;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.OffsetResetStrategy;
+import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
@@ -229,6 +225,45 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
assertThat(bean.producers()).isEqualTo(1);
}
+ @Test
+ public void testDeadLetterQueueStrategyWithInterceptor() {
+ List<ConsumerRecord<String, Integer>> records = new CopyOnWriteArrayList<>();
+ String randomId = UUID.randomUUID().toString();
+
+ usage.consume(randomId, randomId, OffsetResetStrategy.EARLIEST,
+ new StringDeserializer(), new IntegerDeserializer(), () -> records.size() < 3, null, null,
+ Collections.singletonList("dead-letter-topic-kafka-itcp"), records::add);
+
+ MyReceiverBeanUsingPayload bean = runApplication(
+ getDeadLetterQueueWithCustomConfig("dq-itcp", "dead-letter-topic-kafka-itcp")
+ .with("mp.messaging.incoming.kafka.interceptor.classes", IdentityInterceptor.class.getName()),
+ MyReceiverBeanUsingPayload.class);
+ await().until(this::isReady);
+
+ AtomicInteger counter = new AtomicInteger();
+ new Thread(() -> usage.produceIntegers(10, null,
+ () -> new ProducerRecord<>("dq-itcp", counter.getAndIncrement()))).start();
+
+ await().atMost(2, TimeUnit.MINUTES).until(() -> bean.list().size() >= 10);
+ assertThat(bean.list()).containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
+
+ await().atMost(2, TimeUnit.MINUTES).until(() -> records.size() == 3);
+ assertThat(records).allSatisfy(r -> {
+ assertThat(r.topic()).isEqualTo("dead-letter-topic-kafka-itcp");
+ assertThat(r.value()).isIn(3, 6, 9);
+ assertThat(new String(r.headers().lastHeader(DEAD_LETTER_REASON).value())).startsWith("nack 3 -");
+ assertThat(r.headers().lastHeader(DEAD_LETTER_CAUSE)).isNull();
+ assertThat(new String(r.headers().lastHeader(DEAD_LETTER_PARTITION).value())).isEqualTo("0");
+ assertThat(new String(r.headers().lastHeader(DEAD_LETTER_TOPIC).value())).isEqualTo("dq-itcp");
+ assertThat(new String(r.headers().lastHeader(DEAD_LETTER_OFFSET).value())).isNotNull().isIn("3", "6", "9");
+ });
+
+ assertThat(isAlive()).isTrue();
+
+ assertThat(bean.consumers()).isEqualTo(1);
+ assertThat(bean.producers()).isEqualTo(1);
+ }
+
@Test
public void testDeadLetterQueueStrategyWithCustomConfig() {
List<ConsumerRecord<String, Integer>> records = new CopyOnWriteArrayList<>();
@@ -397,4 +432,28 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
return observedProducerEvents.sum();
}
}
+
+ public static class IdentityInterceptor<K, V> implements ConsumerInterceptor<K, V> {
+ @Override
+ public ConsumerRecords<K, V> onConsume(
+ ConsumerRecords<K, V> records) {
+ return records;
+ }
+
+ @Override
+ public void onCommit(
+ Map<TopicPartition, OffsetAndMetadata> offsets) {
+
+ }
+
+ @Override
+ public void close() {
+
+ }
+
+ @Override
+ public void configure(Map<String, ?> configs) {
+
+ }
+ }
} | ['smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java', 'smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,018,032 | 204,216 | 27,735 | 366 | 382 | 67 | 7 | 1 | 5,816 | 160 | 1,326 | 62 | 1 | 1 | 1970-01-01T00:26:44 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,313 | smallrye/smallrye-reactive-messaging/1668/1662 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/1662 | https://github.com/smallrye/smallrye-reactive-messaging/pull/1668 | https://github.com/smallrye/smallrye-reactive-messaging/pull/1668 | 1 | fix | AMQP - when using multiple Emitters intermitently fails to retrieve credits on startup | **Scenario**
Quarkus application using SRM AMQP 3.15.0
Application uses Emitters to send messages to multiple Queues on the same Azure Service Bus
**Problem**
On startup, SRM silently fails to obtain credits for **some** of the channels. This is sporadic but frequent.
This does not cause the startup to fail. It also does not recover.
Attempts to send messages using these Emitters fail.
**Reproducer**
git clone https://bitbucket.org/alexlitovsky1/srm-amqp-credits-bug.git
**Steps**
1. mvn quarkus:dev
2. Watch the output for these lines `DEBUG [io.sma.rea.mes.amqp] (vert.x-eventloop-thread-0) SRMSG16222: Retrieved credits for channel testX: 1000`
3. There should be 3 of them: `test1`, `test2`, and `test3`. Most of the time you will only see a subset of those acquire credits.
**Output from an error scenario**
Credits were obtained for test2 and test3. Not for test1
```
2022-03-11 09:34:06,595 FINE [org.apa.qpi.pro.eng.imp.SaslImpl] (vert.x-eventloop-thread-0) Handled outcome: SaslImpl [_outcome=PN_SASL_OK, state=PN_SASL_PASS, done=true, role=CLIENT]
2022-03-11 09:34:06,600 FINE [org.apa.qpi.pro.eng.imp.SaslImpl] (vert.x-eventloop-thread-0) Handled outcome: SaslImpl [_outcome=PN_SASL_OK, state=PN_SASL_PASS, done=true, role=CLIENT]
2022-03-11 09:34:06,601 FINE [org.apa.qpi.pro.eng.imp.SaslImpl] (vert.x-eventloop-thread-0) Handled outcome: SaslImpl [_outcome=PN_SASL_OK, state=PN_SASL_PASS, done=true, role=CLIENT]
2022-03-11 09:34:06,612 INFO [io.sma.rea.mes.amqp] (vert.x-eventloop-thread-0) SRMSG16213: Connection with AMQP broker established
2022-03-11 09:34:06,619 INFO [io.sma.rea.mes.amqp] (vert.x-eventloop-thread-0) SRMSG16213: Connection with AMQP broker established
2022-03-11 09:34:06,623 INFO [io.sma.rea.mes.amqp] (vert.x-eventloop-thread-0) SRMSG16213: Connection with AMQP broker established
2022-03-11 09:34:06,640 DEBUG [io.sma.rea.mes.amqp] (vert.x-eventloop-thread-0) SRMSG16222: Retrieved credits for channel `test3`: 1000
2022-03-11 09:34:06,641 DEBUG [io.sma.rea.mes.amqp] (vert.x-eventloop-thread-0) SRMSG16222: Retrieved credits for channel `test2`: 1000
2022-03-11 09:34:12,022 ERROR [io.qua.sch.run.SimpleScheduler] (executor-thread-0) Error occured while executing task for trigger IntervalTrigger [id=1_com.alexlitovsky.bugs.amqp.MessageSender_ScheduledInvoker_sendMessage_28ac1eed06c7507f1cfe170b7c9ab14464f2f6dc, interval=60000]: java.lang.RuntimeException: Timeout sending to emitter1
at com.alexlitovsky.bugs.amqp.MessageSender.sendMessage(MessageSender.java:46)
at com.alexlitovsky.bugs.amqp.MessageSender.sendMessage(MessageSender.java:36)
at com.alexlitovsky.bugs.amqp.MessageSender_Subclass.sendMessage$$superforward1(Unknown Source)
at com.alexlitovsky.bugs.amqp.MessageSender_Subclass$$function$$1.apply(Unknown Source)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:54)
at io.quarkus.arc.runtime.devconsole.InvocationInterceptor.proceed(InvocationInterceptor.java:62)
at io.quarkus.arc.runtime.devconsole.InvocationInterceptor.monitor(InvocationInterceptor.java:49)
at io.quarkus.arc.runtime.devconsole.InvocationInterceptor_Bean.intercept(Unknown Source)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.perform(AroundInvokeInvocationContext.java:41)
at io.quarkus.arc.impl.InvocationContexts.performAroundInvoke(InvocationContexts.java:32)
at com.alexlitovsky.bugs.amqp.MessageSender_Subclass.sendMessage(Unknown Source)
at com.alexlitovsky.bugs.amqp.MessageSender_ClientProxy.sendMessage(Unknown Source)
at com.alexlitovsky.bugs.amqp.MessageSender_ScheduledInvoker_sendMessage_28ac1eed06c7507f1cfe170b7c9ab14464f2f6dc.invokeBean(Unknown Source)
at io.quarkus.arc.runtime.BeanInvoker.invoke(BeanInvoker.java:20)
at io.quarkus.scheduler.runtime.SimpleScheduler$ScheduledTask$1.run(SimpleScheduler.java:274)
at io.quarkus.vertx.core.runtime.VertxCoreRecorder$13.runWith(VertxCoreRecorder.java:548)
at org.jboss.threads.EnhancedQueueExecutor$Task.run(EnhancedQueueExecutor.java:2449)
at org.jboss.threads.EnhancedQueueExecutor$ThreadBody.run(EnhancedQueueExecutor.java:1478)
at org.jboss.threads.DelegatingRunnable.run(DelegatingRunnable.java:29)
at org.jboss.threads.ThreadLocalResettingRunnable.run(ThreadLocalResettingRunnable.java:29)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.util.concurrent.TimeoutException
at java.base/java.util.concurrent.CompletableFuture.timedGet(CompletableFuture.java:1886)
at java.base/java.util.concurrent.CompletableFuture.get(CompletableFuture.java:2021)
at com.alexlitovsky.bugs.amqp.MessageSender.sendMessage(MessageSender.java:43)
... 22 more
``` | 14f39aa1c9bcbb0a5adaa9ba89b5a1f2ecaa433f | ba468b722e270df4c858cd31a890d55a3fdc1e16 | https://github.com/smallrye/smallrye-reactive-messaging/compare/14f39aa1c9bcbb0a5adaa9ba89b5a1f2ecaa433f...ba468b722e270df4c858cd31a890d55a3fdc1e16 | diff --git a/smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpCreditBasedSender.java b/smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpCreditBasedSender.java
index 4f646ddfa..55314c8dc 100644
--- a/smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpCreditBasedSender.java
+++ b/smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpCreditBasedSender.java
@@ -56,6 +56,12 @@ public class AmqpCreditBasedSender implements Processor<Message<?>, Message<?>>,
private volatile boolean isAnonymous;
+ /**
+ * A flag tracking if we are retrieving the credits for the sender.
+ * It avoids flooding the broker with credit requests.
+ */
+ private volatile boolean creditRetrievalInProgress = false;
+
public AmqpCreditBasedSender(AmqpConnector connector, ConnectionHolder holder,
AmqpConnectorOutgoingConfiguration configuration, Uni<AmqpSender> retrieveSender) {
this.connector = connector;
@@ -152,6 +158,9 @@ public class AmqpCreditBasedSender implements Processor<Message<?>, Message<?>>,
subscription.request(credits);
return credits;
}
+ if (credits == 0L && subscription != Subscriptions.CANCELLED) {
+ onNoMoreCredit(sender);
+ }
return 0L;
}
@@ -188,22 +197,28 @@ public class AmqpCreditBasedSender implements Processor<Message<?>, Message<?>>,
}
private void onNoMoreCredit(AmqpSender sender) {
- log.noMoreCreditsForChannel(configuration.getChannel());
- holder.getContext().runOnContext(() -> {
- if (isCancelled()) {
- return;
- }
- long c = setCreditsAndRequest(sender);
- if (c == 0L) { // still no credits, schedule a periodic retry
- holder.getVertx().setPeriodic(configuration.getCreditRetrievalPeriod(), id -> {
- if (setCreditsAndRequest(sender) != 0L || isCancelled()) {
- // Got our new credits or the application has been terminated,
- // we cancel the periodic task.
- holder.getVertx().cancelTimer(id);
- }
- });
- }
- });
+ if (!creditRetrievalInProgress) {
+ creditRetrievalInProgress = true;
+ log.noMoreCreditsForChannel(configuration.getChannel());
+ holder.getContext().runOnContext(() -> {
+ if (isCancelled()) {
+ return;
+ }
+ long c = setCreditsAndRequest(sender);
+ if (c == 0L) { // still no credits, schedule a periodic retry
+ holder.getVertx().setPeriodic(configuration.getCreditRetrievalPeriod(), id -> {
+ if (setCreditsAndRequest(sender) != 0L || isCancelled()) {
+ // Got our new credits or the application has been terminated,
+ // we cancel the periodic task.
+ holder.getVertx().cancelTimer(id);
+ creditRetrievalInProgress = false;
+ }
+ });
+ } else {
+ creditRetrievalInProgress = false;
+ }
+ });
+ }
}
private boolean isCancelled() {
@@ -236,7 +251,9 @@ public class AmqpCreditBasedSender implements Processor<Message<?>, Message<?>>,
getSenderAndCredits()
.onItem().ignore().andContinueWithNull()
.subscribe().with(s -> {
- }, f -> downstream.get().onError(f));
+ }, f -> {
+ downstream.get().onError(f);
+ });
}
}
diff --git a/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpCreditTest.java b/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpCreditTest.java
index 0a207cebd..bc24f00be 100644
--- a/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpCreditTest.java
+++ b/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpCreditTest.java
@@ -60,7 +60,7 @@ public class AmqpCreditTest extends AmqpTestBase {
assertThat(msgsReceived.await(20, TimeUnit.SECONDS))
.withFailMessage("Sent %s msgs but %s remain outstanding", msgCount, msgsReceived.getCount()).isTrue();
- List<Integer> expectedPayloads = IntStream.range(0, msgCount).mapToObj(Integer::valueOf).collect(Collectors.toList());
+ List<Integer> expectedPayloads = IntStream.range(0, msgCount).boxed().collect(Collectors.toList());
assertThat(payloadsReceived).containsAll(expectedPayloads);
}
| ['smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpCreditBasedSender.java', 'smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpCreditTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,571,025 | 315,298 | 41,914 | 456 | 2,360 | 417 | 51 | 1 | 4,886 | 306 | 1,380 | 58 | 1 | 1 | 1970-01-01T00:27:27 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,318 | smallrye/smallrye-reactive-messaging/682/681 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/681 | https://github.com/smallrye/smallrye-reactive-messaging/pull/682 | https://github.com/smallrye/smallrye-reactive-messaging/pull/682 | 1 | resolves | Cannot configure dead letter queue value serializer | When using Kafka `dead-letter-queue` failure strategy, adding a `dead-letter-queue.value.serializer` has no impact, and the name deduced from the `value.deserializer` is still used. See [KafkaDeadLetterQueue lines 41-42](https://github.com/smallrye/smallrye-reactive-messaging/blob/master/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java#L41-L42), it is using the key serializer config rather than value deserializer | 731e7d2ecbdd6549710b1fc47e8df0ee088c9570 | f2bb597961ca8880a5afce7694608658e772600f | https://github.com/smallrye/smallrye-reactive-messaging/compare/731e7d2ecbdd6549710b1fc47e8df0ee088c9570...f2bb597961ca8880a5afce7694608658e772600f | diff --git a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
index 07324df42..2a854b71f 100644
--- a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
+++ b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
@@ -39,7 +39,7 @@ public class KafkaDeadLetterQueue implements KafkaFailureHandler {
deadQueueProducerConfig.put("key.serializer",
conf.getDeadLetterQueueKeySerializer().orElse(getMirrorSerializer(keyDeserializer)));
deadQueueProducerConfig.put("value.serializer",
- conf.getDeadLetterQueueKeySerializer().orElse(getMirrorSerializer(valueDeserializer)));
+ conf.getDeadLetterQueueValueSerializer().orElse(getMirrorSerializer(valueDeserializer)));
String deadQueueTopic = conf.getDeadLetterQueueTopic().orElse("dead-letter-topic-" + conf.getChannel());
| ['smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 866,185 | 173,678 | 23,805 | 324 | 211 | 32 | 2 | 1 | 484 | 37 | 119 | 1 | 1 | 0 | 1970-01-01T00:26:35 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,319 | smallrye/smallrye-reactive-messaging/676/675 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/675 | https://github.com/smallrye/smallrye-reactive-messaging/pull/676 | https://github.com/smallrye/smallrye-reactive-messaging/pull/676 | 1 | fix | ClassCastException when a nacked message is sent to the DLQ | The failure strategy 'dead-letter-queue' seems to be broke.
Here is my configuration :
Producer code :
```
mp.messaging.outgoing.generated-price.connector=smallrye-kafka
mp.messaging.outgoing.generated-price.topic=prices
mp.messaging.outgoing.generated-price.value.serializer=org.apache.kafka.common.serialization.IntegerSerializer
```
```java
@Outgoing("generated-price")
public Flowable<Integer> generate() {
return Flowable.interval(5, TimeUnit.SECONDS)
.map(tick -> random.nextInt(100));
}
```
Consumer code :
```
mp.messaging.incoming.prices.connector=smallrye-kafka
mp.messaging.incoming.prices.bootstrap.servers=localhost:9092
mp.messaging.incoming.prices.value.deserializer=org.apache.kafka.common.serialization.IntegerDeserializer
mp.messaging.incoming.prices.group.id=prices-group-local
mp.messaging.incoming.prices.client.id=prices-client-local
mp.messaging.incoming.prices.enable.auto.commit=false
mp.messaging.incoming.prices.failure-strategy=dead-letter-queue
```
```java
@Incoming( "prices" )
@Outgoing( "double-prices" )
@Acknowledgment(Acknowledgment.Strategy.POST_PROCESSING)
public Uni< Integer > consume( final Integer price) {
log.info("incoming message {}", price);
if(price< 50) {
return Uni.createFrom().item(price* 2);
}
else {
return Uni.createFrom().failure(new Exception("Price has to be < 50"));
}
}
```
I got the following stacktrace when the price is >= 50 :
```java
2020-07-21 14:19:07,770 ERROR [io.sma.rea.mes.kafka] (vert.x-eventloop-thread-8) SRMSG18207: Unable to dispatch message to Kafka: java.lang.ClassCastException: io.vertx.kafka.client.producer.RecordMetadata cannot be cast to java.lang.Void
at io.smallrye.context.SmallRyeThreadContext$ContextualFunction.apply(SmallRyeThreadContext.java:76)
at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:616)
at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:591)
at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975)
at io.smallrye.mutiny.operators.UniSubscribeToCompletionStage$2.onItem(UniSubscribeToCompletionStage.java:44)
at io.smallrye.mutiny.context.ContextPropagationUniInterceptor$1.lambda$onItem$1(ContextPropagationUniInterceptor.java:35)
at io.smallrye.context.SmallRyeThreadContext.lambda$withContext$0(SmallRyeThreadContext.java:217)
at io.smallrye.mutiny.context.ContextPropagationUniInterceptor$1.onItem(ContextPropagationUniInterceptor.java:35)
at io.smallrye.mutiny.operators.UniSerializedSubscriber.onItem(UniSerializedSubscriber.java:72)
at io.smallrye.mutiny.vertx.AsyncResultUni.lambda$subscribing$1(AsyncResultUni.java:34)
at io.vertx.kafka.client.producer.impl.KafkaProducerImpl.lambda$send$8(KafkaProducerImpl.java:160)
at io.vertx.kafka.client.producer.impl.KafkaWriteStreamImpl.lambda$null$1(KafkaWriteStreamImpl.java:118)
at io.vertx.core.impl.ContextImpl.executeTask(ContextImpl.java:366)
at io.vertx.core.impl.EventLoopContext.lambda$executeAsync$0(EventLoopContext.java:38)
at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:164)
at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:472)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:500)
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
```
The application stops to consume new messages and the lag is increasing.
I notice that when I lauch this command :
```
kafka-console-consumer -bootstrap-server localhost:9092 --topic dead-letter-topic-prices
```
I see an incoming message so the DLQ is correctly populated.
I use quarkus 1.6
Java :
λ java -version
java version "1.8.0_261"
Java(TM) SE Runtime Environment (build 1.8.0_261-b12)
Java HotSpot(TM) 64-Bit Server VM (build 25.261-b12, mixed mode)
| 1e25e00536a4c1ed132279c505155d3fde9eb314 | 05a0a044ca54ec251199627d594ab2ff421a606d | https://github.com/smallrye/smallrye-reactive-messaging/compare/1e25e00536a4c1ed132279c505155d3fde9eb314...05a0a044ca54ec251199627d594ab2ff421a606d | diff --git a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
index 10bb19cbe..07324df42 100644
--- a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
+++ b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java
@@ -72,6 +72,7 @@ public class KafkaDeadLetterQueue implements KafkaFailureHandler {
log.messageNackedDeadLetter(channel, topic);
return producer.send(dead)
.onFailure().invoke(t -> source.reportFailure((Throwable) t))
+ .onItem().ignore().andContinueWithNull()
.subscribeAsCompletionStage();
}
}
diff --git a/smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java b/smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java
index dd93c45bf..76eb052af 100644
--- a/smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java
+++ b/smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java
@@ -24,6 +24,7 @@ import org.junit.After;
import org.junit.Test;
import io.smallrye.config.SmallRyeConfigProviderResolver;
+import io.smallrye.mutiny.Uni;
import io.smallrye.reactive.messaging.health.HealthReport;
import io.smallrye.reactive.messaging.kafka.*;
@@ -42,7 +43,7 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
@Test
public void testFailStrategy() {
- addConfig(getFailConfig());
+ addConfig(getFailConfig("fail"));
container = baseWeld().addBeanClass(MyReceiverBean.class).initialize();
await().until(() -> {
@@ -66,9 +67,36 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
});
}
+ @Test
+ public void testFailStrategyWithPayload() {
+ addConfig(getFailConfig("fail-payload"));
+ container = baseWeld().addBeanClass(MyReceiverBeanUsingPayload.class).initialize();
+
+ await().until(() -> {
+ HealthReport readiness = getHealth(container).getReadiness();
+ return readiness.isOk();
+ });
+
+ KafkaUsage usage = new KafkaUsage();
+ AtomicInteger counter = new AtomicInteger();
+ new Thread(() -> usage.produceIntegers(10, null,
+ () -> new ProducerRecord<>("fail-payload", counter.getAndIncrement()))).start();
+
+ MyReceiverBeanUsingPayload bean = container.getBeanManager().createInstance().select(MyReceiverBeanUsingPayload.class)
+ .get();
+ await().atMost(2, TimeUnit.MINUTES).until(() -> bean.list().size() >= 4);
+ // Other records should not have been received.
+ assertThat(bean.list()).containsExactly(0, 1, 2, 3);
+
+ await().until(() -> {
+ HealthReport liveness = getHealth(container).getLiveness();
+ return !liveness.isOk();
+ });
+ }
+
@Test
public void testIgnoreStrategy() {
- addConfig(getIgnoreConfig());
+ addConfig(getIgnoreConfig("ignore"));
container = baseWeld().addBeanClass(MyReceiverBean.class).initialize();
await().until(() -> {
@@ -90,6 +118,31 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
assertThat(liveness.isOk()).isTrue();
}
+ @Test
+ public void testIgnoreStrategyWithPayload() {
+ addConfig(getIgnoreConfig("ignore-payload"));
+ container = baseWeld().addBeanClass(MyReceiverBeanUsingPayload.class).initialize();
+
+ await().until(() -> {
+ HealthReport readiness = getHealth(container).getReadiness();
+ return readiness.isOk();
+ });
+
+ KafkaUsage usage = new KafkaUsage();
+ AtomicInteger counter = new AtomicInteger();
+ new Thread(() -> usage.produceIntegers(10, null,
+ () -> new ProducerRecord<>("ignore-payload", counter.getAndIncrement()))).start();
+
+ MyReceiverBeanUsingPayload bean = container.getBeanManager().createInstance().select(MyReceiverBeanUsingPayload.class)
+ .get();
+ await().atMost(2, TimeUnit.MINUTES).until(() -> bean.list().size() >= 10);
+ // All records should not have been received.
+ assertThat(bean.list()).containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
+
+ HealthReport liveness = getHealth(container).getLiveness();
+ assertThat(liveness.isOk()).isTrue();
+ }
+
@Test
public void testDeadLetterQueueStrategyWithDefaultTopic() {
KafkaUsage usage = new KafkaUsage();
@@ -128,6 +181,45 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
assertThat(liveness.isOk()).isTrue();
}
+ @Test
+ public void testDeadLetterQueueStrategyWithCustomTopicAndMethodUsingPayload() {
+ KafkaUsage usage = new KafkaUsage();
+ List<ConsumerRecord<String, Integer>> records = new CopyOnWriteArrayList<>();
+ String randomId = UUID.randomUUID().toString();
+
+ usage.consume(randomId, randomId, OffsetResetStrategy.EARLIEST,
+ new StringDeserializer(), new IntegerDeserializer(), () -> records.size() < 3, null, null,
+ Collections.singletonList("dead-letter-topic-kafka-payload"), records::add);
+
+ addConfig(getDeadLetterQueueWithCustomConfig("dq-payload", "dead-letter-topic-kafka-payload"));
+ container = baseWeld().addBeanClass(MyReceiverBeanUsingPayload.class).initialize();
+
+ await().until(() -> {
+ HealthReport readiness = getHealth(container).getReadiness();
+ return readiness.isOk();
+ });
+
+ AtomicInteger counter = new AtomicInteger();
+ new Thread(() -> usage.produceIntegers(10, null,
+ () -> new ProducerRecord<>("dq-payload", counter.getAndIncrement()))).start();
+
+ MyReceiverBeanUsingPayload bean = container.getBeanManager().createInstance().select(MyReceiverBeanUsingPayload.class)
+ .get();
+ await().atMost(2, TimeUnit.MINUTES).until(() -> bean.list().size() >= 10);
+ assertThat(bean.list()).containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
+
+ await().atMost(2, TimeUnit.MINUTES).until(() -> records.size() == 3);
+ assertThat(records).allSatisfy(r -> {
+ assertThat(r.topic()).isEqualTo("dead-letter-topic-kafka-payload");
+ assertThat(r.value()).isIn(3, 6, 9);
+ assertThat(new String(r.headers().lastHeader("dead-letter-reason").value())).startsWith("nack 3 -");
+ assertThat(r.headers().lastHeader("dead-letter-cause")).isNull();
+ });
+
+ HealthReport liveness = getHealth(container).getLiveness();
+ assertThat(liveness.isOk()).isTrue();
+ }
+
@Test
public void testDeadLetterQueueStrategyWithCustomConfig() {
KafkaUsage usage = new KafkaUsage();
@@ -138,7 +230,7 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
new StringDeserializer(), new IntegerDeserializer(), () -> records.size() < 3, null, null,
Collections.singletonList("missed"), records::add);
- addConfig(getDeadLetterQueueWithCustomConfig());
+ addConfig(getDeadLetterQueueWithCustomConfig("dead-letter-custom", "missed"));
container = baseWeld().addBeanClass(MyReceiverBean.class).initialize();
await().until(() -> {
@@ -166,12 +258,12 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
assertThat(liveness.isOk()).isTrue();
}
- private MapBasedConfig getFailConfig() {
+ private MapBasedConfig getFailConfig(String topic) {
String prefix = "mp.messaging.incoming.kafka.";
Map<String, Object> config = new HashMap<>();
config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
config.put(prefix + "group.id", "my-group");
- config.put(prefix + "topic", "fail");
+ config.put(prefix + "topic", topic);
config.put(prefix + "value.deserializer", IntegerDeserializer.class.getName());
config.put(prefix + "enable.auto.commit", "false");
config.put(prefix + "auto.offset.reset", "earliest");
@@ -180,11 +272,11 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
return new MapBasedConfig(config);
}
- private MapBasedConfig getIgnoreConfig() {
+ private MapBasedConfig getIgnoreConfig(String topic) {
String prefix = "mp.messaging.incoming.kafka.";
Map<String, Object> config = new HashMap<>();
config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
- config.put(prefix + "topic", "ignore");
+ config.put(prefix + "topic", topic);
config.put(prefix + "group.id", "my-group");
config.put(prefix + "value.deserializer", IntegerDeserializer.class.getName());
config.put(prefix + "enable.auto.commit", "false");
@@ -208,17 +300,17 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
return new MapBasedConfig(config);
}
- private MapBasedConfig getDeadLetterQueueWithCustomConfig() {
+ private MapBasedConfig getDeadLetterQueueWithCustomConfig(String topic, String dq) {
String prefix = "mp.messaging.incoming.kafka.";
Map<String, Object> config = new HashMap<>();
config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
config.put(prefix + "group.id", "my-group");
- config.put(prefix + "topic", "dead-letter-custom");
+ config.put(prefix + "topic", topic);
config.put(prefix + "value.deserializer", IntegerDeserializer.class.getName());
config.put(prefix + "enable.auto.commit", "false");
config.put(prefix + "auto.offset.reset", "earliest");
config.put(prefix + "failure-strategy", "dead-letter-queue");
- config.put(prefix + "dead-letter-queue.topic", "missed");
+ config.put(prefix + "dead-letter-queue.topic", dq);
config.put(prefix + "dead-letter-queue.key.serializer", IntegerSerializer.class.getName());
config.put(prefix + "dead-letter-queue.value.serializer", IntegerSerializer.class.getName());
@@ -227,7 +319,7 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
@ApplicationScoped
public static class MyReceiverBean {
- private List<Integer> received = new ArrayList<>();
+ private final List<Integer> received = new ArrayList<>();
@Incoming("kafka")
public CompletionStage<Void> process(KafkaRecord<String, Integer> record) {
@@ -244,4 +336,23 @@ public class KafkaFailureHandlerTest extends KafkaTestBase {
}
}
+
+ @ApplicationScoped
+ public static class MyReceiverBeanUsingPayload {
+ private final List<Integer> received = new ArrayList<>();
+
+ @Incoming("kafka")
+ public Uni<Void> process(int value) {
+ received.add(value);
+ if (value != 0 && value % 3 == 0) {
+ return Uni.createFrom().failure(new IllegalArgumentException("nack 3 - " + value));
+ }
+ return Uni.createFrom().nullItem();
+ }
+
+ public List<Integer> list() {
+ return received;
+ }
+
+ }
} | ['smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/fault/KafkaDeadLetterQueue.java', 'smallrye-reactive-messaging-kafka/src/test/java/io/smallrye/reactive/messaging/kafka/fault/KafkaFailureHandlerTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 866,128 | 173,666 | 23,804 | 324 | 57 | 12 | 1 | 1 | 4,498 | 231 | 1,034 | 99 | 0 | 6 | 1970-01-01T00:26:35 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,320 | smallrye/smallrye-reactive-messaging/520/515 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/515 | https://github.com/smallrye/smallrye-reactive-messaging/pull/520 | https://github.com/smallrye/smallrye-reactive-messaging/pull/520 | 1 | fix | Exception thrown from connector during startup discarded by provider with cause thrown instead | If an exception occurs when a connector creates its subscriber or producer, if that exception has a cause, the exception itself is discarded and the cause is reported as a deployment error.
This happens here: https://github.com/smallrye/smallrye-reactive-messaging/blob/master/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/extension/ReactiveMessagingExtension.java#L135
This was unhelpful in my connector, because the exception that was discarded contained the information about which channel the connector failed for. | ac44eaf3f2ed608bb90f87061e8632a43698d93c | a6b48682a6dbdc14998ea16af41ab63ea6d5ca5e | https://github.com/smallrye/smallrye-reactive-messaging/compare/ac44eaf3f2ed608bb90f87061e8632a43698d93c...a6b48682a6dbdc14998ea16af41ab63ea6d5ca5e | diff --git a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/extension/ReactiveMessagingExtension.java b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/extension/ReactiveMessagingExtension.java
index fdc018899..233fb0ed6 100644
--- a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/extension/ReactiveMessagingExtension.java
+++ b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/extension/ReactiveMessagingExtension.java
@@ -132,12 +132,7 @@ public class ReactiveMessagingExtension implements Extension {
}
} catch (Exception e) {
- if (e.getCause() == null) {
- done.addDeploymentProblem(e);
- } else {
- done.addDeploymentProblem(e.getCause());
- }
-
+ done.addDeploymentProblem(e);
}
}
diff --git a/smallrye-reactive-messaging-provider/src/test/java/io/smallrye/reactive/messaging/merge/InvalidBindingTest.java b/smallrye-reactive-messaging-provider/src/test/java/io/smallrye/reactive/messaging/merge/InvalidBindingTest.java
index 4da66dedc..669e6c31a 100644
--- a/smallrye-reactive-messaging-provider/src/test/java/io/smallrye/reactive/messaging/merge/InvalidBindingTest.java
+++ b/smallrye-reactive-messaging-provider/src/test/java/io/smallrye/reactive/messaging/merge/InvalidBindingTest.java
@@ -39,7 +39,7 @@ public class InvalidBindingTest extends WeldTestBaseWithoutTails {
fail("Invalid weaving not detected");
} catch (DeploymentException e) {
assertThat(e.getCause())
- .isInstanceOf(WeavingException.class)
+ .hasCauseInstanceOf(WeavingException.class)
.hasMessageContaining("`source`")
.hasMessageContaining("#sink")
.hasMessageContaining("(2)");
@@ -70,7 +70,8 @@ public class InvalidBindingTest extends WeldTestBaseWithoutTails {
} catch (DeploymentException e) {
e.getCause().printStackTrace();
assertThat(e.getCause())
- .isInstanceOf(WeavingException.class)
+ .isInstanceOf(DeploymentException.class)
+ .hasCauseInstanceOf(WeavingException.class)
.hasMessageContaining("source")
.hasMessageContaining("Synchronous");
} | ['smallrye-reactive-messaging-provider/src/test/java/io/smallrye/reactive/messaging/merge/InvalidBindingTest.java', 'smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/extension/ReactiveMessagingExtension.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 680,396 | 134,391 | 18,821 | 238 | 227 | 40 | 7 | 1 | 560 | 58 | 112 | 5 | 1 | 0 | 1970-01-01T00:26:27 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,321 | smallrye/smallrye-reactive-messaging/109/108 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/108 | https://github.com/smallrye/smallrye-reactive-messaging/pull/109 | https://github.com/smallrye/smallrye-reactive-messaging/pull/109 | 1 | fixes | Kafka configuration parsing fails with `java.lang.NumberFormatException: For input string: "org.apache.kafka.common.serialization.StringSerializer"` | In the native mode, a Quarkus app that uses reactive-messaging-kafka fails with the error below.
It seems the problem is in the fact that SmallRye Config wraps the error in parsing value in an IllegalArgumentException.
```
2019-05-15 16:20:30,472 ERROR [io.sma.rea.mes.imp.LegacyConfiguredStreamFactory] (main) Unable to create the publisher or subscriber during initialization: java.lang.IllegalArgumentException: java.lang.reflect.InvocationTargetException
at io.smallrye.config.ImplicitConverters$StaticMethodConverter.convert(ImplicitConverters.java:99)
at io.smallrye.config.SmallRyeConfig.convert(SmallRyeConfig.java:133)
at io.smallrye.config.SmallRyeConfig.getValue(SmallRyeConfig.java:76)
at io.smallrye.reactive.messaging.impl.ConnectorConfig.getValue(ConnectorConfig.java:57)
at io.smallrye.reactive.messaging.spi.ConfigurationHelper.asJsonObject(ConfigurationHelper.java:51)
at io.smallrye.reactive.messaging.kafka.KafkaSink.<init>(KafkaSink.java:30)
at io.smallrye.reactive.messaging.kafka.KafkaMessagingProvider.getSubscriberBuilder(KafkaMessagingProvider.java:47)
at io.smallrye.reactive.messaging.kafka.KafkaMessagingProvider_ClientProxy.getSubscriberBuilder(Unknown Source)
at io.smallrye.reactive.messaging.impl.LegacyConfiguredStreamFactory.createSubscriberBuilder(LegacyConfiguredStreamFactory.java:134)
at io.smallrye.reactive.messaging.impl.LegacyConfiguredStreamFactory.lambda$initialize$3(LegacyConfiguredStreamFactory.java:101)
at java.util.HashMap.forEach(HashMap.java:1289)
at io.smallrye.reactive.messaging.impl.LegacyConfiguredStreamFactory.initialize(LegacyConfiguredStreamFactory.java:101)
at io.smallrye.reactive.messaging.impl.LegacyConfiguredStreamFactory_ClientProxy.initialize(Unknown Source)
at java.util.Iterator.forEachRemaining(Iterator.java:116)
at java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
at java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:580)
at io.smallrye.reactive.messaging.extension.MediatorManager.initializeAndRun(MediatorManager.java:103)
at io.smallrye.reactive.messaging.extension.MediatorManager_ClientProxy.initializeAndRun(Unknown Source)
at io.quarkus.smallrye.reactivemessaging.runtime.SmallRyeReactiveMessagingLifecycle.onApplicationStart(SmallRyeReactiveMessagingLifecycle.java:18)
at io.quarkus.smallrye.reactivemessaging.runtime.SmallRyeReactiveMessagingLifecycle_Observer_onApplicationStart_4e8937813d9e8faff65c3c07f88fa96615b70e70.notify(Unknown Source)
at io.quarkus.arc.EventImpl$Notifier.notify(EventImpl.java:244)
at io.quarkus.arc.EventImpl.fire(EventImpl.java:85)
at io.quarkus.arc.runtime.LifecycleEventRunner.fireStartupEvent(LifecycleEventRunner.java:39)
at io.quarkus.arc.runtime.ArcDeploymentTemplate.handleLifecycleEvents(ArcDeploymentTemplate.java:115)
at io.quarkus.deployment.steps.LifecycleEventsBuildStep$startupEvent20.deploy(Unknown Source)
at io.quarkus.runner.ApplicationImpl1.doStart(Unknown Source)
at io.quarkus.runtime.Application.start(Application.java:101)
at io.quarkus.runtime.Application.run(Application.java:213)
at io.quarkus.runner.GeneratedMain.main(Unknown Source)
Caused by: java.lang.reflect.InvocationTargetException
at java.lang.reflect.Method.invoke(Method.java:498)
at io.smallrye.config.ImplicitConverters$StaticMethodConverter.convert(ImplicitConverters.java:97)
... 28 more
Caused by: java.lang.NumberFormatException: For input string: "org.apache.kafka.common.serialization.StringSerializer"
at java.lang.Integer.parseInt(Integer.java:580)
at java.lang.Integer.valueOf(Integer.java:766)
... 30 more
``` | 532833838c99d94e0b5237a1078a8d41419af978 | ed3115c2ab57e3ff0d1095705b2cb0fe7abd86c5 | https://github.com/smallrye/smallrye-reactive-messaging/compare/532833838c99d94e0b5237a1078a8d41419af978...ed3115c2ab57e3ff0d1095705b2cb0fe7abd86c5 | diff --git a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/spi/ConfigurationHelper.java b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/spi/ConfigurationHelper.java
index f26f6a901..af09c06f0 100644
--- a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/spi/ConfigurationHelper.java
+++ b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/spi/ConfigurationHelper.java
@@ -51,7 +51,7 @@ public class ConfigurationHelper {
int i = config.getValue(key, Integer.class);
json.put(key, i);
continue;
- } catch (ClassCastException | NumberFormatException e) {
+ } catch (ClassCastException | IllegalArgumentException e) {
// Ignore me
}
@@ -59,7 +59,7 @@ public class ConfigurationHelper {
double d = config.getValue(key, Double.class);
json.put(key, d);
continue;
- } catch (ClassCastException | NumberFormatException e) {
+ } catch (ClassCastException | IllegalArgumentException e) {
// Ignore me
}
| ['smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/spi/ConfigurationHelper.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 228,611 | 47,985 | 6,704 | 100 | 261 | 46 | 4 | 1 | 3,902 | 142 | 865 | 44 | 0 | 1 | 1970-01-01T00:25:57 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,315 | smallrye/smallrye-reactive-messaging/951/950 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/950 | https://github.com/smallrye/smallrye-reactive-messaging/pull/951 | https://github.com/smallrye/smallrye-reactive-messaging/pull/951 | 1 | fix | MP Emitter repackaged from unreleased spec is not marked as Experimental | While there is a note in duplicated `org.eclipse.microprofile.reactive.messaging` package info that new features and changes against the spec are marked with `@Experimental` annotation.
https://github.com/smallrye/smallrye-reactive-messaging/blob/24e3993fd69e6bdc7a1bc60d4deee6a05788bfdd/api/src/main/java/org/eclipse/microprofile/reactive/messaging/package-info.java#L21-L26
`Emitter` and `Channel` are missing this, thus causing confusion between users as this is not part of released spec.
https://github.com/smallrye/smallrye-reactive-messaging/blob/24e3993fd69e6bdc7a1bc60d4deee6a05788bfdd/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Emitter.java#L23-L48
| 24e3993fd69e6bdc7a1bc60d4deee6a05788bfdd | ca41a748200f23479187bf69c90a105414efe746 | https://github.com/smallrye/smallrye-reactive-messaging/compare/24e3993fd69e6bdc7a1bc60d4deee6a05788bfdd...ca41a748200f23479187bf69c90a105414efe746 | diff --git a/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Channel.java b/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Channel.java
index 8feee86ea..55755b583 100644
--- a/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Channel.java
+++ b/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Channel.java
@@ -27,6 +27,8 @@ import java.lang.annotation.Target;
import javax.enterprise.util.Nonbinding;
import javax.inject.Qualifier;
+import io.smallrye.common.annotation.Experimental;
+
/**
* This qualifier indicates which channel should be injected / populated.
* <p>
@@ -52,13 +54,14 @@ import javax.inject.Qualifier;
* emitter.send("a");
* </code>
* </pre>
- *
+ *
* A subscriber for the above channel must be found by the time a message is emitted to the channel.
* Otherwise, {@code IllegalStateException} must be thrown.
*/
@Qualifier
@Retention(RetentionPolicy.RUNTIME)
@Target({ METHOD, CONSTRUCTOR, FIELD, PARAMETER })
+@Experimental("smallrye-only, added to the specification")
public @interface Channel {
/**
diff --git a/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Emitter.java b/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Emitter.java
index e00f23ff1..22210f9b6 100644
--- a/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Emitter.java
+++ b/api/src/main/java/org/eclipse/microprofile/reactive/messaging/Emitter.java
@@ -20,6 +20,8 @@ package org.eclipse.microprofile.reactive.messaging;
import java.util.concurrent.CompletionStage;
+import io.smallrye.common.annotation.Experimental;
+
/**
* Interface used to feed a channel from an <em>imperative</em> piece of code.
* <p>
@@ -45,6 +47,7 @@ import java.util.concurrent.CompletionStage;
*
* @param <T> type of payload
*/
+@Experimental("smallrye-only, added to the specification")
public interface Emitter<T> {
/** | ['api/src/main/java/org/eclipse/microprofile/reactive/messaging/Channel.java', 'api/src/main/java/org/eclipse/microprofile/reactive/messaging/Emitter.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,066,041 | 213,262 | 29,080 | 376 | 235 | 47 | 8 | 2 | 690 | 44 | 188 | 9 | 2 | 0 | 1970-01-01T00:26:51 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,314 | smallrye/smallrye-reactive-messaging/1504/1491 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/1491 | https://github.com/smallrye/smallrye-reactive-messaging/pull/1504 | https://github.com/smallrye/smallrye-reactive-messaging/pull/1504 | 1 | fix | Host disappears if used with AmqpClientOptions | If I have
`mp.messaging.outgoing.source.client-options-name=ssl-client-options`
which points to
```
public AmqpClientOptions getOptions() {
return new AmqpClientOptions()
.setSsl(true)
// TODO Having to set the host is a bit weird since we don't need it when not using SSL
.setHost("localhost")
.setPfxTrustOptions(
new PfxOptions()
.setPath("/blah/blah/client.truststore.p12")
.setPassword("clientts")
);
}
```
If I remove the `setHost()` call it fails:
```
2021-11-01 15:38:11,505 ERROR [io.smallrye.reactive.messaging.amqp] (MSC service thread 1-3) SRMSG16215: Unable to connect to the broker, retry will be attempted: java.lang.NullPointerException: Host must be set
at java.util.Objects.requireNonNull(Objects.java:228)
at io.vertx.amqp.impl.AmqpClientImpl.connect(AmqpClientImpl.java:58)
at io.vertx.amqp.impl.AmqpClientImpl.connect(AmqpClientImpl.java:52)
at io.vertx.mutiny.amqp.AmqpClient.lambda$connect$1(AmqpClient.java:102)
at io.smallrye.mutiny.vertx.AsyncResultUni.subscribe(AsyncResultUni.java:31)
--- SNIP --
```
This happens whether or not I have amqp-host set in my MP Config or not. Without the AmqpClientOptions I don't need AMQP-host, since it in that case defaults to zero. | e5503c048e0ddc8a1eddf4224c0598e578fdda59 | a27e74ab5d19981028745cc7c2557ddc6d780c1a | https://github.com/smallrye/smallrye-reactive-messaging/compare/e5503c048e0ddc8a1eddf4224c0598e578fdda59...a27e74ab5d19981028745cc7c2557ddc6d780c1a | diff --git a/smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpClientHelper.java b/smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpClientHelper.java
index aac28490f..dc9002534 100644
--- a/smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpClientHelper.java
+++ b/smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpClientHelper.java
@@ -26,7 +26,7 @@ public class AmqpClientHelper {
Optional<String> clientOptionsName = config.getClientOptionsName();
Vertx vertx = connector.getVertx();
if (clientOptionsName.isPresent()) {
- client = createClientFromClientOptionsBean(vertx, instance, clientOptionsName.get());
+ client = createClientFromClientOptionsBean(vertx, instance, clientOptionsName.get(), config);
} else {
client = getClient(vertx, config);
}
@@ -35,7 +35,7 @@ public class AmqpClientHelper {
}
static AmqpClient createClientFromClientOptionsBean(Vertx vertx, Instance<AmqpClientOptions> instance,
- String optionsBeanName) {
+ String optionsBeanName, AmqpConnectorCommonConfiguration config) {
Instance<AmqpClientOptions> options = instance.select(Identifier.Literal.of(optionsBeanName));
if (options.isUnsatisfied()) {
// this `if` block should be removed when support for the `@Named` annotation is removed
@@ -48,39 +48,92 @@ public class AmqpClientHelper {
throw ex.illegalStateFindingBean(AmqpClientOptions.class.getName(), optionsBeanName);
}
log.createClientFromBean(optionsBeanName);
- return AmqpClient.create(vertx, options.get());
+
+ // We must merge the channel config and the AMQP Client options.
+ // In case of conflict, use the channel config.
+ AmqpClientOptions clientOptions = getOptions(config);
+ AmqpClientOptions completeOptions = options.get();
+ mergeTo(clientOptions, completeOptions);
+ return AmqpClient.create(vertx, completeOptions);
+ }
+
+ /**
+ * Merges the values from {@code channel} (the channel configuration), into the {@code custom}.
+ * Values from {@code channel} replaces the values from {@code custom}.
+ *
+ * @param channel the channel configuration
+ * @param custom the custom configuration
+ */
+ static void mergeTo(AmqpClientOptions channel, AmqpClientOptions custom) {
+ String username = channel.getUsername();
+ String password = channel.getPassword();
+ String host = channel.getHost();
+ int port = channel.getPort();
+ boolean useSsl = channel.isSsl();
+ int reconnectAttempts = channel.getReconnectAttempts();
+ long reconnectInterval = channel.getReconnectInterval();
+ int connectTimeout = channel.getConnectTimeout();
+
+ if (username != null) {
+ custom.setUsername(channel.getUsername());
+ }
+ if (password != null) {
+ custom.setPassword(channel.getPassword());
+ }
+ if (host != null) {
+ custom.setHost(channel.getHost());
+ }
+ if (port > 0) {
+ custom.setPort(channel.getPort());
+ }
+ if (useSsl) {
+ custom.setSsl(channel.isSsl());
+ }
+ if (reconnectAttempts > 0) {
+ custom.setReconnectAttempts(channel.getReconnectAttempts());
+ }
+ if (reconnectInterval > 0) {
+ custom.setReconnectInterval(channel.getReconnectInterval());
+ }
+ if (connectTimeout > 0) {
+ custom.setConnectTimeout(channel.getConnectTimeout());
+ }
+ }
+
+ static AmqpClientOptions getOptions(AmqpConnectorCommonConfiguration config) {
+ String username = config.getUsername().orElse(null);
+ String password = config.getPassword().orElse(null);
+ String host = config.getHost();
+ int port = config.getPort();
+ log.brokerConfigured(host, port, config.getChannel());
+ boolean useSsl = config.getUseSsl();
+ int reconnectAttempts = config.getReconnectAttempts();
+ int reconnectInterval = config.getReconnectInterval();
+ int connectTimeout = config.getConnectTimeout();
+
+ // We renamed containerID into container-id. So we must check both.
+ String containerId = config.getContainerId()
+ .orElseGet(() -> config.config.getOptionalValue("containerId", String.class).orElse(null));
+
+ AmqpClientOptions options = new AmqpClientOptions()
+ .setUsername(username)
+ .setPassword(password)
+ .setHost(host)
+ .setPort(port)
+ .setContainerId(containerId)
+ .setSsl(useSsl)
+ .setReconnectAttempts(reconnectAttempts)
+ .setReconnectInterval(reconnectInterval)
+ .setConnectTimeout(connectTimeout);
+
+ config.getSniServerName().ifPresent(options::setSniServerName);
+ config.getVirtualHost().ifPresent(options::setVirtualHost);
+ return options;
}
static AmqpClient getClient(Vertx vertx, AmqpConnectorCommonConfiguration config) {
try {
- String username = config.getUsername().orElse(null);
- String password = config.getPassword().orElse(null);
- String host = config.getHost();
- int port = config.getPort();
- log.brokerConfigured(host, port, config.getChannel());
- boolean useSsl = config.getUseSsl();
- int reconnectAttempts = config.getReconnectAttempts();
- int reconnectInterval = config.getReconnectInterval();
- int connectTimeout = config.getConnectTimeout();
-
- // We renamed containerID into container-id. So we must check both.
- String containerId = config.getContainerId()
- .orElseGet(() -> config.config.getOptionalValue("containerId", String.class).orElse(null));
-
- AmqpClientOptions options = new AmqpClientOptions()
- .setUsername(username)
- .setPassword(password)
- .setHost(host)
- .setPort(port)
- .setContainerId(containerId)
- .setSsl(useSsl)
- .setReconnectAttempts(reconnectAttempts)
- .setReconnectInterval(reconnectInterval)
- .setConnectTimeout(connectTimeout);
-
- config.getSniServerName().ifPresent(options::setSniServerName);
- config.getVirtualHost().ifPresent(options::setVirtualHost);
-
+ AmqpClientOptions options = getOptions(config);
return AmqpClient.create(vertx, options);
} catch (Exception e) {
log.unableToCreateClient(e);
diff --git a/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpSourceCDIConfigTest.java b/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpSourceCDIConfigTest.java
index 9c077359d..3d3c5bd9a 100644
--- a/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpSourceCDIConfigTest.java
+++ b/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpSourceCDIConfigTest.java
@@ -2,7 +2,6 @@ package io.smallrye.reactive.messaging.amqp;
import static org.assertj.core.api.Assertions.*;
import static org.awaitility.Awaitility.await;
-import static org.eclipse.microprofile.reactive.messaging.spi.ConnectorFactory.CHANNEL_NAME_ATTRIBUTE;
import java.util.*;
import java.util.concurrent.TimeUnit;
@@ -15,7 +14,6 @@ import org.jboss.weld.exceptions.DeploymentException;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
-import io.smallrye.common.constraint.NotNull;
import io.smallrye.config.SmallRyeConfigProviderResolver;
import io.smallrye.reactive.messaging.connectors.ExecutionHolder;
import io.smallrye.reactive.messaging.test.common.config.MapBasedConfig;
@@ -198,29 +196,40 @@ public class AmqpSourceCDIConfigTest extends AmqpBrokerTestBase {
.isInstanceOf(DeploymentException.class);
}
- @NotNull
- private Map<String, Object> getConfig(String topic) {
- Map<String, Object> config = new HashMap<>();
- config.put("address", topic);
- config.put(CHANNEL_NAME_ATTRIBUTE, UUID.randomUUID().toString());
- config.put("host", host);
- config.put("port", port);
- config.put("name", "some name");
- config.put("username", username);
- config.put("password", password);
- return config;
- }
+ /**
+ * Reproduce https://github.com/smallrye/smallrye-reactive-messaging/issues/1491.
+ */
+ @Test
+ public void testClientConfigWithHostSet() {
+ Weld weld = new Weld();
+
+ String address = UUID.randomUUID().toString();
+ weld.addBeanClass(ClientConfigurationBean.class);
+ weld.addBeanClass(ConsumptionBean.class);
+
+ new MapBasedConfig()
+ .with("mp.messaging.incoming.data.address", address)
+ .with("mp.messaging.incoming.data.connector", AmqpConnector.CONNECTOR_NAME)
+ .with("mp.messaging.incoming.data.host", host)
+ .with("mp.messaging.incoming.data.port", port)
+ .with("mp.messaging.incoming.data.tracing-enabled", false)
+ .with("amqp-username", username)
+ .with("amqp-password", password)
+ .with("amqp-client-options-name", "myclientoptions2")
+ .write();
+
+ container = weld.initialize();
+ await().until(() -> isAmqpConnectorAlive(container));
+ await().until(() -> isAmqpConnectorReady(container));
+ List<Integer> list = container.select(ConsumptionBean.class).get().getResults();
+ assertThat(list).isEmpty();
+
+ AtomicInteger counter = new AtomicInteger();
+ usage.produceTenIntegers(address, counter::getAndIncrement);
+
+ await().atMost(2, TimeUnit.MINUTES).until(() -> list.size() >= 10);
+ assertThat(list).containsExactly(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
- @NotNull
- private Map<String, Object> getConfigUsingChannelName(String topic) {
- Map<String, Object> config = new HashMap<>();
- config.put(CHANNEL_NAME_ATTRIBUTE, topic);
- config.put("host", host);
- config.put("port", port);
- config.put("name", "some name");
- config.put("username", username);
- config.put("password", password);
- return config;
}
}
diff --git a/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/ClientConfigurationBean.java b/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/ClientConfigurationBean.java
index 254169ac2..c1ac3db79 100644
--- a/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/ClientConfigurationBean.java
+++ b/smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/ClientConfigurationBean.java
@@ -19,4 +19,14 @@ public class ClientConfigurationBean {
.setPassword(System.getProperty("amqp-pwd"));
}
+ @Produces
+ @Identifier("myclientoptions2")
+ public AmqpClientOptions options2() {
+ return new AmqpClientOptions()
+ .setContainerId("bla bla")
+ .setVirtualHost("foo bar")
+ .setUsername(System.getProperty("amqp-user"))
+ .setPassword(System.getProperty("amqp-pwd"));
+ }
+
} | ['smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/ClientConfigurationBean.java', 'smallrye-reactive-messaging-amqp/src/main/java/io/smallrye/reactive/messaging/amqp/AmqpClientHelper.java', 'smallrye-reactive-messaging-amqp/src/test/java/io/smallrye/reactive/messaging/amqp/AmqpSourceCDIConfigTest.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 1,376,779 | 276,435 | 36,915 | 433 | 5,319 | 1,004 | 115 | 1 | 1,392 | 122 | 345 | 28 | 0 | 2 | 1970-01-01T00:27:16 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,317 | smallrye/smallrye-reactive-messaging/770/769 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/769 | https://github.com/smallrye/smallrye-reactive-messaging/pull/770 | https://github.com/smallrye/smallrye-reactive-messaging/pull/770 | 1 | fix | Kafka passwords are written to the logs | When using smallrye-reactive-messaging with the Kafka connector, the connector merge the config and log this merged config.
It output the password in the logs, this is a security issue.
See this sample log file from a Quarkus application
```
2020-09-22 11:12:12,230 INFO [io.sma.rea.mes.kafka] (Quarkus Main Thread) SRMSG18200: Merging config with {security.protocol=SSL, ssl.keystore.type=PKCS12, ssl.truststore.location=truststore.jks, ssl.keystore.password=secret, ssl.key.password=secret, ssl.keystore.location=keystore.p12, bootstrap.servers=my-host:9092, ssl.truststore.password=secret}
``` | bb26ef47658f5f602cfcc0e5db6e0fadca40d4df | ace71c235e631b7e6478f0734e0765caaa7007d4 | https://github.com/smallrye/smallrye-reactive-messaging/compare/bb26ef47658f5f602cfcc0e5db6e0fadca40d4df...ace71c235e631b7e6478f0734e0765caaa7007d4 | diff --git a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/KafkaConnector.java b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/KafkaConnector.java
index 7eefe4765..2f90b64af 100644
--- a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/KafkaConnector.java
+++ b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/KafkaConnector.java
@@ -183,8 +183,6 @@ public class KafkaConnector implements IncomingConnectorFactory, OutgoingConnect
}
private Config merge(Config passedCfg, Map<String, Object> defaultKafkaCfg) {
- log.mergingConfigWith(defaultKafkaCfg);
-
return new Config() {
@SuppressWarnings("unchecked")
@Override
diff --git a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/i18n/KafkaLogging.java b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/i18n/KafkaLogging.java
index b15e52495..177f7b1bb 100644
--- a/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/i18n/KafkaLogging.java
+++ b/smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/i18n/KafkaLogging.java
@@ -1,6 +1,5 @@
package io.smallrye.reactive.messaging.kafka.i18n;
-import java.util.Map;
import java.util.Set;
import org.jboss.logging.BasicLogger;
@@ -19,10 +18,6 @@ public interface KafkaLogging extends BasicLogger {
KafkaLogging log = Logger.getMessageLogger(KafkaLogging.class, "io.smallrye.reactive.messaging.kafka");
- @LogMessage(level = Logger.Level.INFO)
- @Message(id = 18200, value = "Merging config with %s")
- void mergingConfigWith(Map<String, Object> defaultKafkaCfg);
-
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 18201, value = "Dead queue letter configured with: topic: `%s`, key serializer: `%s`, value serializer: `%s`")
void deadLetterConfig(String deadQueueTopic, String keySerializer, String valueSerializer); | ['smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/i18n/KafkaLogging.java', 'smallrye-reactive-messaging-kafka/src/main/java/io/smallrye/reactive/messaging/kafka/KafkaConnector.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 986,525 | 198,006 | 26,978 | 360 | 244 | 60 | 7 | 2 | 606 | 59 | 161 | 8 | 0 | 1 | 1970-01-01T00:26:40 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,312 | smallrye/smallrye-reactive-messaging/2219/2198 | smallrye | smallrye-reactive-messaging | https://github.com/smallrye/smallrye-reactive-messaging/issues/2198 | https://github.com/smallrye/smallrye-reactive-messaging/pull/2219 | https://github.com/smallrye/smallrye-reactive-messaging/pull/2219#issuecomment-1628538157 | 2 | fixes | Unordered blocking consumer requests too many messages | # Describe the bug
I made a [discussion post](https://github.com/smallrye/smallrye-reactive-messaging/discussions/2196) describing the problem:
<div type='discussions-op-text'>
<sup>Originally posted by **AndreasPetersen** June 14, 2023</sup>
I'm implementing a connector to Azure Service Bus using SmallRye Reactive Messaging 3.22.1. I want to be able to concurrently process a number of messages. I can do this by:
```
@Incoming("in-history")
@Blocking(ordered = false, value = "in-history")
public CompletionStage<Void> handleMessageFromAzure(Message<History> message) {
try {
logger.info("Processing message: " + message.getMetadata(MessageId.class).get().id()); // My connector adds a Azure Service Bus message id as metadata
Thread.sleep(1000); // Do some blocking work
return message.ack();
} catch (InterruptedException e) {
return message.nack(e);
}
}
```
with the config:
```
smallrye.messaging.worker.in-history.max-concurrency=2
```
This works almost as expected. Two messages are indeed processed concurrently, but requests are constantly being made to the publisher. My connector looks like this:
```
@Override
public PublisherBuilder<? extends Message<?>> getPublisherBuilder(Config config) {
ServiceBusReceiverAsyncClient serviceBusReceiverAsyncClient = getServiceBusReceiverAsyncClient(config); // This gives us an Azure Service Bus client that we can use to received messages from Azure
Flux<ServiceBusReceivedMessage> flux = serviceBusReceiverAsyncClient.receiveMessages();
var multi = Multi.createFrom().publisher(flux)
.log() // I've added a log to see what's going on
.onItem().invoke(serviceBusReceivedMessage -> logger.info("Received {}", serviceBusReceivedMessage.getMessageId()))
.onItem().transform(azureMessage -> mapToReactiveMessage(azureMessage))
return ReactiveStreams.fromPublisher(multi);
}
```
When starting my Quarkus service I can see in the logs:
```
2023-06-14 17:27:50,196 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 9500cb1f42ed4b9a860ec35c4c89b172
2023-06-14 17:27:50,216 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:50,247 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@79ed499b)
2023-06-14 17:27:50,248 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received b4f16eb0798c459aaef6bb7f1b6bcaed
2023-06-14 17:27:51,574 INFO [io.qua.mut.run.MutinyInfrastructure] (pool-1-thread-1) {} Multi..0 | request(1)
2023-06-14 17:27:51,575 INFO [io.qua.mut.run.MutinyInfrastructure] (pool-1-thread-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@337cb7d4)
2023-06-14 17:27:51,575 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (pool-1-thread-1) {} Received f4149959456041eaa386605c11a5c536
2023-06-14 17:27:51,576 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-4) {} Multi..0 | request(1)
2023-06-14 17:27:51,582 INFO [io.sma.rea.mes.pro.con.WorkerPoolRegistry] (vert.x-eventloop-thread-3) {sampled=true, spanId=edfc2b30352bb593, traceId=8bd823a32007c950151012ec38489f85} Created worker pool named in-history with concurrency of 2
2023-06-14 17:27:51,591 INFO [dk.ban.drb.ser.GenerateHistoryServiceBusConsumer] (in-history-0) {sampled=true, spanId=9213606bb5f98990, traceId=b2a7e115f36f8208c4a92c82db28fa4a} Processing message: f4149959456041eaa386605c11a5c536
2023-06-14 17:27:51,591 INFO [dk.ban.drb.ser.GenerateHistoryServiceBusConsumer] (in-history-1) {sampled=true, spanId=edfc2b30352bb593, traceId=8bd823a32007c950151012ec38489f85} Processing message: 9500cb1f42ed4b9a860ec35c4c89b172
2023-06-14 17:27:51,615 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@6b8ff80c)
2023-06-14 17:27:51,616 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received bc9e6cbe4cb040d7a3e1a67c3b5faeaf
2023-06-14 17:27:51,617 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:51,661 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@48e9a550)
2023-06-14 17:27:51,662 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 43a5c809241b4222bb985d8a0d6baa9c
2023-06-14 17:27:51,663 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:51,702 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@336e7e88)
2023-06-14 17:27:51,702 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received ecbc17cb38dd44a08b2b3421b7c9eb72
2023-06-14 17:27:51,703 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:51,741 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@20dc58ef)
2023-06-14 17:27:51,742 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 03002654344c44638eb78da8c10649d8
2023-06-14 17:27:51,743 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:51,784 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@34079b60)
2023-06-14 17:27:51,785 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 24ef34ed572a412fb4f23083bb7df6d7
2023-06-14 17:27:51,788 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:51,830 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@31417b95)
2023-06-14 17:27:51,830 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 1787ba34ea924740b31db1fd2a7a8b28
2023-06-14 17:27:51,833 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:51,871 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@711bc90b)
2023-06-14 17:27:51,872 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received b31a40ac77744e0fb8165c83934b6cf0
2023-06-14 17:27:51,874 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:51,912 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@3342fce7)
2023-06-14 17:27:51,913 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 40eb624688c54727a026b74a45c0fe49
2023-06-14 17:27:51,914 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:51,953 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@3b96bd74)
2023-06-14 17:27:51,953 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received cfe3c9d1739d4342a1a1e379deb4ec0c
2023-06-14 17:27:51,954 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:52,000 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@1b1f440)
2023-06-14 17:27:52,001 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 3649dce24a3e45519f9f14730f909ace
2023-06-14 17:27:52,002 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:52,143 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@2e4d26ec)
2023-06-14 17:27:52,144 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 0be14e338d30481881c5b58cf02a99e3
2023-06-14 17:27:52,145 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:52,189 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@acc4206)
2023-06-14 17:27:52,189 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received c0a02c97fdd34075ad7ceb830cb28e52
2023-06-14 17:27:52,191 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:52,232 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@43f2336)
2023-06-14 17:27:52,232 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received f862c0ffcf284bf2b8357368ba755ba7
2023-06-14 17:27:52,234 INFO [io.qua.mut.run.MutinyInfrastructure] (vert.x-eventloop-thread-3) {} Multi..0 | request(1)
2023-06-14 17:27:52,280 INFO [io.qua.mut.run.MutinyInfrastructure] (boundedElastic-1) {} Multi..0 | onItem(com.azure.messaging.servicebus.ServiceBusReceivedMessage@a22d405)
2023-06-14 17:27:52,280 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusConnector] (boundedElastic-1) {} Received 1344b27a8dac4ca39420831de2f00d47
2023-06-14 17:27:52,626 INFO [dk.ban.drb.ser.GenerateHistoryServiceBusConsumer] (in-history-1) {sampled=true, spanId=ebeaacbb82822529, traceId=e7549b9f53135ff66f7621b37c1ab969} Processing message: b4f16eb0798c459aaef6bb7f1b6bcaed
2023-06-14 17:27:52,626 INFO [dk.ban.drb.ser.GenerateHistoryServiceBusConsumer] (in-history-0) {sampled=true, spanId=490007faaf99b2bd, traceId=8d20cb5d3553d16afc17316635472b88} Processing message: bc9e6cbe4cb040d7a3e1a67c3b5faeaf
2023-06-14 17:27:52,678 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusIncomingMessage] (reactor-executor-1) {} Acked 9500cb1f42ed4b9a860ec35c4c89b172
2023-06-14 17:27:52,680 INFO [dk.ban.qua.rea.mes.asb.AzureServiceBusIncomingMessage] (reactor-executor-1) {} Acked f4149959456041eaa386605c11a5c536
```
How can I limit the subscriber to only request new items, when existing ones have been acked? I would expect the subscriber to request 2 items, process those two items, and then request another 2. Instead, the subscriber just constantly requests 1 item, resulting in my service not being able to keep up and eventually getting a variety of errors.
If the blocking annotation is ordered, then a new item is only requested once the current item has been acked.</div>
# Expected behavior
Messages requests should match configured worker threads concurrency.
# Actual behavior
From my understanding, the problem occurs at [this line](https://github.com/smallrye/smallrye-reactive-messaging/blob/1d2db081d7acf3f1307f445071702e058d6d295e/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/PublisherMediator.java#LL146C28-L146C28).
`transformToUniAndMerge` will merge with the default concurrency of `Queues.BUFFER_S` which is 256. The merge will therefore fill up the buffer with 256 messages, despite the consumer worker thread having a lower concurrency. This becomes a problem with a slow consumer with messages being received in peek-lock mode. The consumer cannot keep up with the requests.
If I set the system property `mutiny.buffer-size.s` to match the concurrency of the worker threads, then messages are only being received as fast as they can be processed. Of course setting this will globally change the buffer size for all merges, which is not exactly great.
# Implementation ideas
Should the merge not match the concurrency of the configured worker threads? Or at the very least be configurable to do so? I'm thinking it should be something along the lines of:
```
this.publisher = decorate(MultiUtils.createFromGenerator(this::invokeBlocking)
.onItem().transformToUni(u -> u)
.withRequests(configuredRequestSize) // I'm not sure if this should be changed from the default 1. Maybe it could make sense to match the configured concurrency here as well, or allow it to be configured separately?
.merge(configuredConcurrency) // This will ensure the consumer can keep up
.onItem().transform(Message::of));
```
at [this ](https://github.com/smallrye/smallrye-reactive-messaging/blob/1d2db081d7acf3f1307f445071702e058d6d295e/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/PublisherMediator.java#LL146C28-L146C28) and [this ](https://github.com/smallrye/smallrye-reactive-messaging/blob/1d2db081d7acf3f1307f445071702e058d6d295e/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/PublisherMediator.java#LL166C1-L167C1) location.
# Version
I've tested this with Quarkus 2.16.7 with JDK17 and SmallRye Reactive Messaging 3.22.1. I haven't tested the latest version, but the code in question seems to be the same as in 3.22.1. | 7bc6e7bff27f8664a48ab3142dd7fe80b229d511 | d02a5a51230429d9a1aced64d6d783d06117eb1f | https://github.com/smallrye/smallrye-reactive-messaging/compare/7bc6e7bff27f8664a48ab3142dd7fe80b229d511...d02a5a51230429d9a1aced64d6d783d06117eb1f | diff --git a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/AbstractMediator.java b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/AbstractMediator.java
index 8bb6a5a57..0f252e74f 100644
--- a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/AbstractMediator.java
+++ b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/AbstractMediator.java
@@ -44,6 +44,7 @@ public abstract class AbstractMediator {
protected HealthCenter health;
private Instance<MessageConverter> converters;
private Instance<KeyValueExtractor> extractors;
+ private int maxConcurrency;
public AbstractMediator(MediatorConfiguration configuration) {
this.configuration = configuration;
@@ -99,6 +100,10 @@ public abstract class AbstractMediator {
this.workerPoolRegistry = workerPoolRegistry;
}
+ public void setMaxConcurrency(int maxConcurrency) {
+ this.maxConcurrency = maxConcurrency;
+ }
+
public void run() {
// Do nothing by default.
}
@@ -259,8 +264,11 @@ public abstract class AbstractMediator {
return extractors;
}
+ public int maxConcurrency() {
+ return maxConcurrency;
+ }
+
public void terminate() {
// Do nothing by default.
}
-
}
diff --git a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/ProcessorMediator.java b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/ProcessorMediator.java
index 822d76ccd..cde6e8afe 100644
--- a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/ProcessorMediator.java
+++ b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/ProcessorMediator.java
@@ -323,10 +323,11 @@ public class ProcessorMediator extends AbstractMediator {
this.mapper = upstream -> {
Multi<? extends Message<?>> multi = MultiUtils.handlePreProcessingAcknowledgement(upstream, configuration);
return multi
- .onItem().transformToMultiAndMerge(message -> invokeBlocking(message, getArguments(message))
+ .onItem().transformToMulti(message -> invokeBlocking(message, getArguments(message))
.onItemOrFailure()
.transformToUni((o, t) -> this.handlePostInvocationWithMessage((Message<?>) o, t))
- .onItem().transformToMulti(this::handleSkip));
+ .onItem().transformToMulti(this::handleSkip))
+ .merge(maxConcurrency());
};
}
@@ -358,9 +359,10 @@ public class ProcessorMediator extends AbstractMediator {
.onItem().transformToMulti(this::handleSkip));
} else {
this.mapper = upstream -> MultiUtils.handlePreProcessingAcknowledgement(upstream, configuration)
- .onItem().transformToMultiAndMerge(message -> invokeBlocking(message, getArguments(message))
+ .onItem().transformToMulti(message -> invokeBlocking(message, getArguments(message))
.onItemOrFailure().transformToUni((r, f) -> handlePostInvocation(message, r, f))
- .onItem().transformToMulti(this::handleSkip));
+ .onItem().transformToMulti(this::handleSkip))
+ .merge(maxConcurrency());
}
} else {
diff --git a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/PublisherMediator.java b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/PublisherMediator.java
index c64807b4a..7b4d69beb 100644
--- a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/PublisherMediator.java
+++ b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/PublisherMediator.java
@@ -143,7 +143,7 @@ public class PublisherMediator extends AbstractMediator {
.onItem().transform(o -> (Message<?>) o));
} else {
this.publisher = decorate(MultiUtils.createFromGenerator(this::invokeBlocking)
- .onItem().transformToUniAndMerge(u -> u)
+ .onItem().transformToUni(u -> u).merge(maxConcurrency())
.onItem().transform(o -> (Message<?>) o));
}
} else {
@@ -163,7 +163,7 @@ public class PublisherMediator extends AbstractMediator {
.onItem().transform(Message::of));
} else {
this.publisher = decorate(MultiUtils.createFromGenerator(this::invokeBlocking)
- .onItem().transformToUniAndMerge(u -> u)
+ .onItem().transformToUni(u -> u).merge(maxConcurrency())
.onItem().transform(Message::of));
}
} else {
diff --git a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/SubscriberMediator.java b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/SubscriberMediator.java
index 12f4fd6e0..090d24fad 100644
--- a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/SubscriberMediator.java
+++ b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/SubscriberMediator.java
@@ -165,8 +165,9 @@ public class SubscriberMediator extends AbstractMediator {
.invoke(failure -> health.reportApplicationFailure(configuration.methodAsString(), failure));
} else {
this.function = upstream -> MultiUtils.handlePreProcessingAcknowledgement(upstream, configuration)
- .onItem().transformToUniAndMerge(msg -> invokeBlocking(msg, getArguments(msg))
+ .onItem().transformToUni(msg -> invokeBlocking(msg, getArguments(msg))
.onItemOrFailure().transformToUni(handleInvocationResult(msg)))
+ .merge(maxConcurrency())
.onFailure()
.invoke(failure -> health.reportApplicationFailure(configuration.methodAsString(), failure));
}
@@ -210,7 +211,7 @@ public class SubscriberMediator extends AbstractMediator {
.onFailure().invoke(this::reportFailure);
} else {
this.function = upstream -> MultiUtils.handlePreProcessingAcknowledgement(upstream, configuration)
- .onItem().transformToUniAndMerge(this::invokeBlockingAndHandleOutcome)
+ .onItem().transformToUni(this::invokeBlockingAndHandleOutcome).merge(maxConcurrency())
.onFailure().invoke(this::reportFailure);
}
} else {
@@ -243,7 +244,7 @@ public class SubscriberMediator extends AbstractMediator {
.onFailure().invoke(this::reportFailure);
} else {
this.function = upstream -> MultiUtils.handlePreProcessingAcknowledgement(upstream, configuration)
- .onItem().transformToUniAndMerge(this::invokeBlockingAndHandleOutcome)
+ .onItem().transformToUni(this::invokeBlockingAndHandleOutcome).merge(maxConcurrency())
.onFailure().invoke(this::reportFailure);
}
} else {
diff --git a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/connectors/WorkerPoolRegistry.java b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/connectors/WorkerPoolRegistry.java
index 067db3403..13bdf4567 100644
--- a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/connectors/WorkerPoolRegistry.java
+++ b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/connectors/WorkerPoolRegistry.java
@@ -35,8 +35,8 @@ import io.vertx.mutiny.core.WorkerExecutor;
@ApplicationScoped
public class WorkerPoolRegistry {
- private static final String WORKER_CONFIG_PREFIX = "smallrye.messaging.worker";
- private static final String WORKER_CONCURRENCY = "max-concurrency";
+ public static final String WORKER_CONFIG_PREFIX = "smallrye.messaging.worker";
+ public static final String WORKER_CONCURRENCY = "max-concurrency";
@Inject
Instance<ExecutionHolder> executionHolder;
diff --git a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/extension/MediatorManager.java b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/extension/MediatorManager.java
index 20f2042ca..1d502baef 100644
--- a/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/extension/MediatorManager.java
+++ b/smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/extension/MediatorManager.java
@@ -1,5 +1,7 @@
package io.smallrye.reactive.messaging.providers.extension;
+import static io.smallrye.reactive.messaging.providers.connectors.WorkerPoolRegistry.WORKER_CONCURRENCY;
+import static io.smallrye.reactive.messaging.providers.connectors.WorkerPoolRegistry.WORKER_CONFIG_PREFIX;
import static io.smallrye.reactive.messaging.providers.i18n.ProviderLogging.log;
import java.lang.reflect.Constructor;
@@ -11,10 +13,12 @@ import jakarta.enterprise.inject.Instance;
import jakarta.enterprise.inject.spi.*;
import jakarta.inject.Inject;
+import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.eclipse.microprofile.reactive.messaging.Incoming;
import org.eclipse.microprofile.reactive.messaging.Outgoing;
+import io.smallrye.mutiny.helpers.queues.Queues;
import io.smallrye.reactive.messaging.*;
import io.smallrye.reactive.messaging.EmitterConfiguration;
import io.smallrye.reactive.messaging.PublisherDecorator;
@@ -79,6 +83,9 @@ public class MediatorManager {
@ConfigProperty(name = STRICT_MODE_PROPERTY, defaultValue = "false")
boolean strictMode;
+ @Inject
+ Instance<Config> configInstance;
+
public <T> void analyze(AnnotatedType<T> annotatedType, Bean<T> bean) {
if (strictMode) {
@@ -97,6 +104,22 @@ public class MediatorManager {
});
}
+ private int getWorkerMaxConcurrency(MediatorConfiguration configuration) {
+ // max concurrency is not relevant if not blocking
+ if (!configuration.isBlocking()) {
+ return -1;
+ }
+ String poolName = configuration.getWorkerPoolName();
+ // if the poll name is null we are on the default worker pool, set the default concurrent requests
+ if (poolName == null) {
+ return Queues.BUFFER_S;
+ }
+ String concurrencyConfigKey = WORKER_CONFIG_PREFIX + "." + poolName + "." + WORKER_CONCURRENCY;
+ Optional<Integer> concurrency = configInstance.get().getOptionalValue(concurrencyConfigKey, Integer.class);
+ // Fallback to the default concurrent requests if setting is not found
+ return concurrency.orElse(Queues.BUFFER_S);
+ }
+
/**
* This method is used in the Quarkus extension.
*
@@ -177,6 +200,7 @@ public class MediatorManager {
mediator.setExtractors(extractors);
mediator.setHealth(health);
mediator.setWorkerPoolRegistry(workerPoolRegistry);
+ mediator.setMaxConcurrency(getWorkerMaxConcurrency(configuration));
try {
Object beanInstance = beanManager.getReference(configuration.getBean(), Object.class, | ['smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/ProcessorMediator.java', 'smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/extension/MediatorManager.java', 'smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/connectors/WorkerPoolRegistry.java', 'smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/AbstractMediator.java', 'smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/SubscriberMediator.java', 'smallrye-reactive-messaging-provider/src/main/java/io/smallrye/reactive/messaging/providers/PublisherMediator.java'] | {'.java': 6} | 6 | 6 | 0 | 0 | 6 | 1,982,862 | 398,738 | 52,686 | 592 | 3,666 | 676 | 59 | 6 | 13,786 | 1,041 | 4,473 | 135 | 4 | 5 | 1970-01-01T00:28:08 | 199 | Java | {'Java': 5169047, 'Shell': 1047} | Apache License 2.0 |
1,052 | google/ground-android/824/761 | google | ground-android | https://github.com/google/ground-android/issues/761 | https://github.com/google/ground-android/pull/824 | https://github.com/google/ground-android/pull/824 | 1 | fixes | [Add/edit observation] Changes not reflected on view observation page on save | 1. Edit an existing observation
2. Click save
User is returned to view observation page. Instead, they should be redirected to the feature sheet for consistency with "add observation" flow. | c2084feba9ba81bb2a10f0f028117704e8999d1f | 49206ba7120939b03c5a50d0804227b197616e26 | https://github.com/google/ground-android/compare/c2084feba9ba81bb2a10f0f028117704e8999d1f...49206ba7120939b03c5a50d0804227b197616e26 | diff --git a/gnd/src/main/java/com/google/android/gnd/persistence/local/room/RoomLocalDataStore.java b/gnd/src/main/java/com/google/android/gnd/persistence/local/room/RoomLocalDataStore.java
index 4d2f66d83..ea7f16df9 100644
--- a/gnd/src/main/java/com/google/android/gnd/persistence/local/room/RoomLocalDataStore.java
+++ b/gnd/src/main/java/com/google/android/gnd/persistence/local/room/RoomLocalDataStore.java
@@ -42,6 +42,7 @@ import com.google.android.gnd.model.layer.Layer;
import com.google.android.gnd.model.observation.Observation;
import com.google.android.gnd.model.observation.ObservationMutation;
import com.google.android.gnd.model.observation.ResponseMap;
+import com.google.android.gnd.model.observation.ResponseMap.Builder;
import com.google.android.gnd.persistence.local.LocalDataStore;
import com.google.android.gnd.persistence.local.room.converter.ResponseDeltasConverter;
import com.google.android.gnd.persistence.local.room.converter.ResponseMapConverter;
@@ -86,6 +87,7 @@ import io.reactivex.Flowable;
import io.reactivex.Maybe;
import io.reactivex.Observable;
import io.reactivex.Single;
+import io.reactivex.SingleSource;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -431,13 +433,8 @@ public class RoomLocalDataStore implements LocalDataStore {
long clientTimestamp = lastMutation.getClientTimestamp();
Timber.v("Merging observation " + this + " with mutations " + mutations);
ObservationEntity.Builder builder = observation.toBuilder();
- ResponseMap.Builder responseMap =
- ResponseMapConverter.fromString(form, observation.getResponses()).toBuilder();
- for (ObservationMutationEntity mutation : mutations) {
- // Merge changes to responses.
- responseMap.applyDeltas(
- ResponseDeltasConverter.fromString(form, mutation.getResponseDeltas()));
- }
+ builder.setResponses(
+ ResponseMapConverter.toString(applyMutations(form, observation, mutations)));
// Update modified user and time.
AuditInfoEntity lastModified =
AuditInfoEntity.builder()
@@ -449,6 +446,18 @@ public class RoomLocalDataStore implements LocalDataStore {
return builder.build();
}
+ private ResponseMap applyMutations(
+ Form form, ObservationEntity observation, List<ObservationMutationEntity> mutations) {
+ Builder responseMap =
+ ResponseMapConverter.fromString(form, observation.getResponses()).toBuilder();
+ for (ObservationMutationEntity mutation : mutations) {
+ // Merge changes to responses.
+ responseMap.applyDeltas(
+ ResponseDeltasConverter.fromString(form, mutation.getResponseDeltas()));
+ }
+ return responseMap.build();
+ }
+
private Completable apply(FeatureMutation mutation) throws LocalDataStoreException {
switch (mutation.getType()) {
case CREATE:
@@ -539,13 +548,23 @@ public class RoomLocalDataStore implements LocalDataStore {
return observationDao
.findById(mutation.getObservationId())
.doOnSubscribe(__ -> Timber.v("Applying mutation: %s", mutation))
- // Emit NoSuchElementException if not found.
- .toSingle()
+ .switchIfEmpty(fallbackObservation(mutation))
.map(obs -> applyMutations(mutation.getForm(), obs, ImmutableList.of(mutationEntity), user))
.flatMapCompletable(obs -> observationDao.insertOrUpdate(obs).subscribeOn(schedulers.io()))
.subscribeOn(schedulers.io());
}
+ /**
+ * Returns a source which creates an observation based on the provided mutation. Used in rare
+ * cases when the observation is no longer in the local db, but the user is updating rather than
+ * creating a new observation. In these cases creation metadata is unknown, so empty audit info is
+ * used.
+ */
+ private SingleSource<ObservationEntity> fallbackObservation(ObservationMutation mutation) {
+ return em ->
+ em.onSuccess(ObservationEntity.fromMutation(mutation, AuditInfo.builder().build()));
+ }
+
private Completable markObservationForDeletion(
ObservationEntity entity, ObservationMutation mutation) {
return observationDao
diff --git a/gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java b/gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java
index c11eb2343..31253940a 100644
--- a/gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java
+++ b/gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java
@@ -156,7 +156,7 @@ public class ObservationRepository {
return applyAndEnqueue(observationMutation);
}
- public Completable addObservationMutation(
+ public Completable createOrUpdateObservation(
Observation observation, ImmutableList<ResponseDelta> responseDeltas, boolean isNew) {
ObservationMutation observationMutation =
ObservationMutation.builder()
diff --git a/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java b/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java
index e9c0c7e4a..d35366e68 100644
--- a/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java
+++ b/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java
@@ -277,7 +277,7 @@ public class EditObservationViewModel extends AbstractViewModel {
}
return observationRepository
- .addObservationMutation(originalObservation, getResponseDeltas(), isNew)
+ .createOrUpdateObservation(originalObservation, getResponseDeltas(), isNew)
.doOnSubscribe(__ -> isSaving.postValue(true))
.doOnComplete(() -> isSaving.postValue(false))
.toSingleDefault(Event.create(SaveResult.SAVED));
diff --git a/gnd/src/test/java/com/google/android/gnd/persistence/local/LocalDataStoreTest.java b/gnd/src/test/java/com/google/android/gnd/persistence/local/LocalDataStoreTest.java
index c93b90ea1..ef4a3f6fb 100644
--- a/gnd/src/test/java/com/google/android/gnd/persistence/local/LocalDataStoreTest.java
+++ b/gnd/src/test/java/com/google/android/gnd/persistence/local/LocalDataStoreTest.java
@@ -131,7 +131,7 @@ public class LocalDataStoreTest {
ResponseDelta.builder()
.setFieldId("field id")
.setFieldType(Field.Type.TEXT_FIELD)
- .setNewResponse(TextResponse.fromString("response for field id"))
+ .setNewResponse(TextResponse.fromString("updated response"))
.build()))
.setClientTimestamp(new Date())
.build();
@@ -437,7 +437,7 @@ public class LocalDataStoreTest {
.values()
.get(0)
.getResponses();
- assertThat("foo value").isEqualTo(responses.getResponse("field id").get().toString());
+ assertThat("updated response").isEqualTo(responses.getResponse("field id").get().toString());
}
@Test | ['gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java', 'gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java', 'gnd/src/main/java/com/google/android/gnd/persistence/local/room/RoomLocalDataStore.java', 'gnd/src/test/java/com/google/android/gnd/persistence/local/LocalDataStoreTest.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 804,667 | 165,844 | 23,726 | 277 | 2,004 | 388 | 41 | 3 | 193 | 30 | 39 | 4 | 0 | 0 | 1970-01-01T00:27:01 | 191 | Kotlin | {'Kotlin': 1134095, 'Java': 81131, 'Python': 3111} | Apache License 2.0 |
1,050 | google/ground-android/863/849 | google | ground-android | https://github.com/google/ground-android/issues/849 | https://github.com/google/ground-android/pull/863 | https://github.com/google/ground-android/pull/863 | 1 | closes | [Feature details] Observation sync sometimes times out | The timeout may need to be increased for slower internet connections:
```
2021-06-04 11:45:26.204 2125-9557/com.google.android.gnd E/ObservationRepository: Observation sync timed out
java.util.concurrent.TimeoutException: The source did not signal an event for 5 seconds and has been terminated.
at io.reactivex.internal.operators.single.SingleTimeout$TimeoutMainObserver.run(SingleTimeout.java:123)
at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:38)
at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:26)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:301)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
at java.lang.Thread.run(Thread.java:923)
``` | 8f44db6783bbc1b1561c970ffbf0188b2278506a | 64e9192139626107228022ee6e10cda386f44f8d | https://github.com/google/ground-android/compare/8f44db6783bbc1b1561c970ffbf0188b2278506a...64e9192139626107228022ee6e10cda386f44f8d | diff --git a/gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java b/gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java
index 31253940a..3a438ffe9 100644
--- a/gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java
+++ b/gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java
@@ -45,7 +45,7 @@ import timber.log.Timber;
*/
public class ObservationRepository {
- private static final long LOAD_REMOTE_OBSERVATIONS_TIMEOUT_SECS = 5;
+ private static final long LOAD_REMOTE_OBSERVATIONS_TIMEOUT_SECS = 15;
private final LocalDataStore localDataStore;
private final RemoteDataStore remoteDataStore;
diff --git a/gnd/src/main/java/com/google/android/gnd/repository/ProjectRepository.java b/gnd/src/main/java/com/google/android/gnd/repository/ProjectRepository.java
index 727e6568f..0c0965555 100644
--- a/gnd/src/main/java/com/google/android/gnd/repository/ProjectRepository.java
+++ b/gnd/src/main/java/com/google/android/gnd/repository/ProjectRepository.java
@@ -53,7 +53,7 @@ import timber.log.Timber;
@Singleton
public class ProjectRepository {
- private static final long LOAD_REMOTE_PROJECT_TIMEOUT_SECS = 5;
+ private static final long LOAD_REMOTE_PROJECT_TIMEOUT_SECS = 15;
private static final long LOAD_REMOTE_PROJECT_SUMMARIES_TIMEOUT_SECS = 30;
private final InMemoryCache cache; | ['gnd/src/main/java/com/google/android/gnd/repository/ObservationRepository.java', 'gnd/src/main/java/com/google/android/gnd/repository/ProjectRepository.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 830,169 | 170,990 | 24,426 | 284 | 278 | 64 | 4 | 2 | 1,046 | 52 | 218 | 14 | 0 | 1 | 1970-01-01T00:27:02 | 191 | Kotlin | {'Kotlin': 1134095, 'Java': 81131, 'Python': 3111} | Apache License 2.0 |
1,047 | google/ground-android/1014/1013 | google | ground-android | https://github.com/google/ground-android/issues/1013 | https://github.com/google/ground-android/pull/1014 | https://github.com/google/ground-android/pull/1014 | 1 | fixes | [Join Project] No such element exception | **Describe the bug**
When joining a project, Ground sometimes throws a "No Such Element" exception when setting map features.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to 'Join Project'
2. Click on a project
4. See error (potentially)
**Stacktrace**
```
2021-09-30 16:46:29.082 5636-5636/com.google.android.gnd E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.google.android.gnd, PID: 5636
java.util.NoSuchElementException
at java.util.HashMap$HashIterator.nextNode(HashMap.java:1443)
at java.util.HashMap$EntryIterator.next(HashMap.java:1475)
at java.util.HashMap$EntryIterator.next(HashMap.java:1473)
at com.google.android.gnd.ui.map.gms.GoogleMapsMapAdapter.setMapFeatures(GoogleMapsMapAdapter.java:359)
at com.google.android.gnd.ui.home.mapcontainer.-$$Lambda$Gpu9kEZxCjyf8AhAC-rOJIqwKEI.onChanged(Unknown Source:4)
at androidx.lifecycle.LiveData.considerNotify(LiveData.java:133)
at androidx.lifecycle.LiveData.dispatchingValue(LiveData.java:151)
at androidx.lifecycle.LiveData.setValue(LiveData.java:309)
at androidx.lifecycle.LiveData$1.run(LiveData.java:93)
at android.os.Handler.handleCallback(Handler.java:873)
at android.os.Handler.dispatchMessage(Handler.java:99)
at android.os.Looper.loop(Looper.java:193)
at android.app.ActivityThread.main(ActivityThread.java:6669)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
``` | e21d63e885752cfbe1beb77a24752ae768e789e2 | 3a9cf8b81d4832870684e97944131b40b1da5923 | https://github.com/google/ground-android/compare/e21d63e885752cfbe1beb77a24752ae768e789e2...3a9cf8b81d4832870684e97944131b40b1da5923 | diff --git a/gnd/src/main/java/com/google/android/gnd/ui/map/gms/GoogleMapsMapAdapter.java b/gnd/src/main/java/com/google/android/gnd/ui/map/gms/GoogleMapsMapAdapter.java
index 3327fe2c3..e31cdfc5a 100644
--- a/gnd/src/main/java/com/google/android/gnd/ui/map/gms/GoogleMapsMapAdapter.java
+++ b/gnd/src/main/java/com/google/android/gnd/ui/map/gms/GoogleMapsMapAdapter.java
@@ -166,8 +166,7 @@ class GoogleMapsMapAdapter implements MapAdapter {
Builder<MapFeature> candidates = ImmutableList.builder();
ArrayList<String> processed = new ArrayList<>();
- for (Entry<MapFeature, GeometryCollection> geoJsonEntry :
- geoJsonGeometries.entrySet()) {
+ for (Entry<MapFeature, GeometryCollection> geoJsonEntry : geoJsonGeometries.entrySet()) {
MapGeoJson geoJsonFeature = (MapGeoJson) geoJsonEntry.getKey();
GeometryCollection geoJsonGeometry = geoJsonEntry.getValue();
if (processed.contains(geoJsonFeature.getId())) {
@@ -279,8 +278,7 @@ class GoogleMapsMapAdapter implements MapAdapter {
LatLng position = toLatLng(mapPin.getPosition());
String color = mapPin.getStyle().getColor();
BitmapDescriptor icon = markerIconFactory.getMarkerIcon(parseColor(color));
- Marker marker =
- map.addMarker(new MarkerOptions().position(position).icon(icon).alpha(1.0f));
+ Marker marker = map.addMarker(new MarkerOptions().position(position).icon(icon).alpha(1.0f));
markers.add(marker);
marker.setTag(mapPin);
}
@@ -355,8 +353,9 @@ class GoogleMapsMapAdapter implements MapAdapter {
Iterator<Entry<MapFeature, Polyline>> polylineIterator = polygons.entrySet().iterator();
while (polylineIterator.hasNext()) {
- MapFeature mapFeature = polylineIterator.next().getKey();
- Polyline polyline = polylineIterator.next().getValue();
+ Entry<MapFeature, Polyline> entry = polylineIterator.next();
+ MapFeature mapFeature = entry.getKey();
+ Polyline polyline = entry.getValue();
if (features.contains(mapFeature)) {
// If polygon already exists on map, don't add it.
featuresToUpdate.remove(mapFeature);
@@ -484,9 +483,7 @@ class GoogleMapsMapAdapter implements MapAdapter {
stream(urls).forEach(this::addRemoteTileOverlay);
}
- /**
- * A collection of geometries in a GeoJson feature.
- */
+ /** A collection of geometries in a GeoJson feature. */
private class GeometryCollection {
List<Marker> markers = new ArrayList<>();
List<Polyline> polylines = new ArrayList<>();
@@ -549,10 +546,7 @@ class GoogleMapsMapAdapter implements MapAdapter {
}
private Marker addMarker(GeoJsonPoint point) {
- return map.addMarker(
- new MarkerOptions()
- .zIndex(1)
- .position(point.getCoordinates()));
+ return map.addMarker(new MarkerOptions().zIndex(1).position(point.getCoordinates()));
}
private Polyline addPolyline(GeoJsonLineString lineString, float width, int color) {
@@ -565,12 +559,13 @@ class GoogleMapsMapAdapter implements MapAdapter {
}
private Polygon addPolygon(GeoJsonPolygon dataPolygon, float width, int color) {
- PolygonOptions polygonOptions = new PolygonOptions()
- .addAll(dataPolygon.getOuterBoundaryCoordinates())
- .strokeWidth(width)
- .strokeColor(color)
- .clickable(false)
- .zIndex(1);
+ PolygonOptions polygonOptions =
+ new PolygonOptions()
+ .addAll(dataPolygon.getOuterBoundaryCoordinates())
+ .strokeWidth(width)
+ .strokeColor(color)
+ .clickable(false)
+ .zIndex(1);
for (List<LatLng> innerBoundary : dataPolygon.getInnerBoundaryCoordinates()) {
polygonOptions.addHole(innerBoundary);
}
@@ -583,7 +578,7 @@ class GoogleMapsMapAdapter implements MapAdapter {
GeoJsonParser geoJsonParser = new GeoJsonParser(mapFeature.getGeoJson());
GeometryCollection featureGeometries = new GeometryCollection();
geoJsonGeometries.put(mapFeature, featureGeometries);
- for (GeoJsonFeature geoJsonFeature: geoJsonParser.getFeatures()) {
+ for (GeoJsonFeature geoJsonFeature : geoJsonParser.getFeatures()) {
featureGeometries.addGeometry(mapFeature, geoJsonFeature.getGeometry());
}
} | ['gnd/src/main/java/com/google/android/gnd/ui/map/gms/GoogleMapsMapAdapter.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 946,493 | 194,933 | 27,702 | 309 | 1,699 | 349 | 35 | 1 | 1,670 | 89 | 404 | 32 | 0 | 1 | 1970-01-01T00:27:13 | 191 | Kotlin | {'Kotlin': 1134095, 'Java': 81131, 'Python': 3111} | Apache License 2.0 |
1,046 | google/ground-android/1076/1068 | google | ground-android | https://github.com/google/ground-android/issues/1068 | https://github.com/google/ground-android/pull/1076 | https://github.com/google/ground-android/pull/1076 | 1 | fixes | Cleared photo reappears on rotating device | **Describe the bug**
The photo response reappears even after removing it manually. This only happens if the screen is rotated after removing the photo.
**To Reproduce**
Steps to reproduce the behavior:
1. Select any project which contains a photo field
2. Create a new observation and click on add photo
3. Select a new photo
4. The selected/captured photo should be visible in the edit fragment.
5. Now, remove the photo and rotate the screen.
**Expected behavior**
Removed photo should not reappear
**Actual behavior**
We can see that the photo is visible.
| d8406cee9f64fffe9a1102db8b80eab7f51bd8ef | b786bac1f625febdcefa6d52a7dfb3d60ad86098 | https://github.com/google/ground-android/compare/d8406cee9f64fffe9a1102db8b80eab7f51bd8ef...b786bac1f625febdcefa6d52a7dfb3d60ad86098 | diff --git a/gnd/src/main/java/com/google/android/gnd/persistence/remote/firestore/FirestoreStorageManager.java b/gnd/src/main/java/com/google/android/gnd/persistence/remote/firestore/FirestoreStorageManager.java
index 42b5fcbe3..aed32051b 100644
--- a/gnd/src/main/java/com/google/android/gnd/persistence/remote/firestore/FirestoreStorageManager.java
+++ b/gnd/src/main/java/com/google/android/gnd/persistence/remote/firestore/FirestoreStorageManager.java
@@ -17,7 +17,6 @@
package com.google.android.gnd.persistence.remote.firestore;
import android.net.Uri;
-import com.google.android.gnd.model.observation.Observation;
import com.google.android.gnd.persistence.remote.RemoteStorageManager;
import com.google.android.gnd.persistence.remote.TransferProgress;
import com.google.android.gnd.rx.RxTask;
@@ -46,19 +45,19 @@ public class FirestoreStorageManager implements RemoteStorageManager {
FirestoreStorageManager() {}
/**
- * Generates destination path in which an observation attachement is to be stored in to Cloud
+ * Generates destination path in which an observation attachment is to be stored in to Cloud
* Storage.
*
* <p>user-media/projects/{project_id}/observations/{observation_id}/{field_id-uuid.jpg}
*/
- public static String getRemoteMediaPath(Observation observation, String filename) {
+ public static String getRemoteMediaPath(String projectId, String observationId, String filename) {
// TODO: Refactor this into MediaStorageRepository.
return new StringJoiner(File.separator)
.add(MEDIA_ROOT_DIR)
.add("projects")
- .add(observation.getProject().getId())
+ .add(projectId)
.add("observations")
- .add(observation.getId())
+ .add(observationId)
.add(filename)
.toString();
}
diff --git a/gnd/src/main/java/com/google/android/gnd/repository/UserMediaRepository.java b/gnd/src/main/java/com/google/android/gnd/repository/UserMediaRepository.java
index 2bb277ad7..9d43c6766 100644
--- a/gnd/src/main/java/com/google/android/gnd/repository/UserMediaRepository.java
+++ b/gnd/src/main/java/com/google/android/gnd/repository/UserMediaRepository.java
@@ -22,7 +22,6 @@ import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import com.google.android.gnd.Config;
-import com.google.android.gnd.model.form.Field;
import com.google.android.gnd.persistence.remote.RemoteStorageManager;
import com.google.android.gnd.persistence.uuid.OfflineUuidGenerator;
import com.google.android.gnd.rx.annotations.Cold;
@@ -61,12 +60,12 @@ public class UserMediaRepository {
return context.getExternalFilesDir(Environment.DIRECTORY_PICTURES);
}
- public String createImageFilename(Field field) {
- return field.getId() + "-" + uuidGenerator.generateUuid() + Config.PHOTO_EXT;
+ public String createImageFilename(String fieldId) {
+ return fieldId + "-" + uuidGenerator.generateUuid() + Config.PHOTO_EXT;
}
- public File createImageFile(Field field) {
- return new File(getRootDir(), createImageFilename(field));
+ public File createImageFile(String fieldId) {
+ return new File(getRootDir(), createImageFilename(fieldId));
}
/**
@@ -74,8 +73,8 @@ public class UserMediaRepository {
*
* @throws IOException If path is not accessible or error occurs while saving file
*/
- public File savePhoto(Bitmap bitmap, Field field) throws IOException {
- File file = createImageFile(field);
+ public File savePhoto(Bitmap bitmap, String fieldId) throws IOException {
+ File file = createImageFile(fieldId);
try (FileOutputStream fos = new FileOutputStream(file)) {
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
}
diff --git a/gnd/src/main/java/com/google/android/gnd/system/CameraManager.java b/gnd/src/main/java/com/google/android/gnd/system/CameraManager.java
deleted file mode 100644
index bd14cd1ef..000000000
--- a/gnd/src/main/java/com/google/android/gnd/system/CameraManager.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright 2020 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.google.android.gnd.system;
-
-import android.Manifest.permission;
-import android.content.Context;
-import android.content.Intent;
-import android.net.Uri;
-import android.provider.MediaStore;
-import androidx.core.content.FileProvider;
-import com.google.android.gnd.BuildConfig;
-import com.google.android.gnd.rx.Nil;
-import com.google.android.gnd.rx.annotations.Cold;
-import com.google.android.gnd.rx.annotations.Hot;
-import dagger.hilt.android.qualifiers.ApplicationContext;
-import io.reactivex.Completable;
-import io.reactivex.Maybe;
-import java.io.File;
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import timber.log.Timber;
-
-/** Manages permissions needed for using camera and related flows to/from Activity. */
-@Singleton
-public class CameraManager {
-
- /** Used to identify requests coming from this application. */
- static final int CAPTURE_PHOTO_REQUEST_CODE = CameraManager.class.hashCode() & 0xffff;
-
- private final Context context;
- private final PermissionsManager permissionsManager;
- private final ActivityStreams activityStreams;
-
- @Inject
- public CameraManager(
- @ApplicationContext Context context,
- PermissionsManager permissionsManager,
- ActivityStreams activityStreams) {
- this.context = context;
- this.permissionsManager = permissionsManager;
- this.activityStreams = activityStreams;
- }
-
- /** Launches the system's photo capture flow, first obtaining permissions if necessary. */
- @Cold
- public Maybe<Nil> capturePhoto(File destFile) {
- return permissionsManager
- .obtainPermission(permission.WRITE_EXTERNAL_STORAGE)
- .andThen(permissionsManager.obtainPermission(permission.CAMERA))
- .andThen(sendCapturePhotoIntent(destFile))
- .andThen(capturePhotoResult());
- }
-
- /** Enqueue an intent for capturing a photo. */
- @Cold
- private Completable sendCapturePhotoIntent(File photoFile) {
- return Completable.fromAction(
- () ->
- activityStreams.withActivity(
- activity -> {
- Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
- Uri photoUri =
- FileProvider.getUriForFile(context, BuildConfig.APPLICATION_ID, photoFile);
- cameraIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoUri);
- activity.startActivityForResult(cameraIntent, CAPTURE_PHOTO_REQUEST_CODE);
- Timber.v("Capture photo intent sent. Image path: %s", photoFile);
- }));
- }
-
- /** Emits the result of the photo capture request. */
- @Hot(terminates = true)
- Maybe<Nil> capturePhotoResult() {
- return activityStreams
- .getNextActivityResult(CAPTURE_PHOTO_REQUEST_CODE)
- .flatMapMaybe(this::onCapturePhotoResult)
- .singleElement();
- }
-
- /** Returns success if the result is ok. */
- @Cold
- private Maybe<Nil> onCapturePhotoResult(ActivityResult result) {
- Timber.v("Photo result returned");
- return Maybe.create(
- emitter -> {
- if (result.isOk()) {
- emitter.onSuccess(Nil.NIL);
- } else {
- emitter.onComplete();
- }
- });
- }
-}
diff --git a/gnd/src/main/java/com/google/android/gnd/system/StorageManager.java b/gnd/src/main/java/com/google/android/gnd/system/StorageManager.java
index 6e2ba15f5..1e66b2e29 100644
--- a/gnd/src/main/java/com/google/android/gnd/system/StorageManager.java
+++ b/gnd/src/main/java/com/google/android/gnd/system/StorageManager.java
@@ -21,6 +21,7 @@ import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
import com.google.android.gnd.rx.annotations.Cold;
import com.google.android.gnd.rx.annotations.Hot;
import com.google.android.gnd.ui.util.BitmapUtil;
@@ -35,6 +36,7 @@ import timber.log.Timber;
@Singleton
public class StorageManager {
+ @VisibleForTesting
static final int PICK_PHOTO_REQUEST_CODE = StorageManager.class.hashCode() & 0xffff;
private final PermissionsManager permissionsManager;
diff --git a/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationFragment.java b/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationFragment.java
index 3d39daade..be19f73f5 100644
--- a/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationFragment.java
+++ b/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationFragment.java
@@ -31,6 +31,9 @@ import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.LinearLayout;
+import androidx.activity.result.ActivityResultLauncher;
+import androidx.activity.result.contract.ActivityResultContracts.GetContent;
+import androidx.activity.result.contract.ActivityResultContracts.TakePicturePreview;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.databinding.ViewDataBinding;
@@ -58,6 +61,7 @@ import com.google.android.gnd.ui.common.BackPressListener;
import com.google.android.gnd.ui.common.EphemeralPopups;
import com.google.android.gnd.ui.common.Navigator;
import com.google.android.gnd.ui.common.TwoLineToolbar;
+import com.google.android.gnd.ui.util.BitmapUtil;
import com.google.android.material.bottomsheet.BottomSheetDialog;
import com.google.common.collect.ImmutableList;
import dagger.hilt.android.AndroidEntryPoint;
@@ -74,10 +78,19 @@ import timber.log.Timber;
@AndroidEntryPoint
public class EditObservationFragment extends AbstractFragment implements BackPressListener {
- private static final String RESTORED_RESPONSES_BUNDLE_KEY = "restoredResponses";
+ /** String constant keys used for persisting state in {@see Bundle} objects. */
+ private static final class BundleKeys {
+
+ /** Key used to store unsaved responses across activity re-creation. */
+ private static final String RESTORED_RESPONSES = "restoredResponses";
+
+ /** Key used to store field ID waiting for photo response across activity re-creation. */
+ private static final String FIELD_WAITING_FOR_PHOTO = "photoFieldId";
+ }
private final List<AbstractFieldViewModel> fieldViewModelList = new ArrayList<>();
+ @Inject BitmapUtil bitmapUtil;
@Inject Navigator navigator;
@Inject FieldViewFactory fieldViewFactory;
@Inject EphemeralPopups popups;
@@ -86,6 +99,9 @@ public class EditObservationFragment extends AbstractFragment implements BackPre
private EditObservationViewModel viewModel;
private EditObservationFragBinding binding;
+ private ActivityResultLauncher<String> selectPhotoLauncher;
+ private ActivityResultLauncher<Void> capturePhotoLauncher;
+
private static AbstractFieldViewModel getViewModel(ViewDataBinding binding) {
if (binding instanceof TextInputFieldBinding) {
return ((TextInputFieldBinding) binding).getViewModel();
@@ -108,6 +124,10 @@ public class EditObservationFragment extends AbstractFragment implements BackPre
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
viewModel = getViewModel(EditObservationViewModel.class);
+ selectPhotoLauncher =
+ registerForActivityResult(new GetContent(), viewModel::onSelectPhotoResult);
+ capturePhotoLauncher =
+ registerForActivityResult(new TakePicturePreview(), viewModel::onCapturePhotoResult);
}
@Override
@@ -139,8 +159,10 @@ public class EditObservationFragment extends AbstractFragment implements BackPre
Bundle args = getArguments();
if (savedInstanceState != null) {
args.putSerializable(
- RESTORED_RESPONSES_BUNDLE_KEY,
- savedInstanceState.getSerializable(RESTORED_RESPONSES_BUNDLE_KEY));
+ BundleKeys.RESTORED_RESPONSES,
+ savedInstanceState.getSerializable(BundleKeys.RESTORED_RESPONSES));
+ viewModel.setFieldWaitingForPhoto(
+ savedInstanceState.getString(BundleKeys.FIELD_WAITING_FOR_PHOTO));
}
viewModel.initialize(EditObservationFragmentArgs.fromBundle(args));
}
@@ -148,7 +170,8 @@ public class EditObservationFragment extends AbstractFragment implements BackPre
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
- outState.putSerializable(RESTORED_RESPONSES_BUNDLE_KEY, viewModel.getDraftResponses());
+ outState.putSerializable(BundleKeys.RESTORED_RESPONSES, viewModel.getDraftResponses());
+ outState.putString(BundleKeys.FIELD_WAITING_FOR_PHOTO, viewModel.getFieldWaitingForPhoto());
}
private void handleSaveResult(EditObservationViewModel.SaveResult saveResult) {
@@ -289,8 +312,10 @@ public class EditObservationFragment extends AbstractFragment implements BackPre
private void initPhotoField(PhotoFieldViewModel photoFieldViewModel) {
photoFieldViewModel.setEditable(true);
+ photoFieldViewModel.setProjectId(viewModel.getProjectId());
+ photoFieldViewModel.setObservationId(viewModel.getObservationId());
observeSelectPhotoClicks(photoFieldViewModel);
- observePhotoAdded(photoFieldViewModel);
+ observePhotoResults(photoFieldViewModel);
}
private void observeSelectPhotoClicks(PhotoFieldViewModel fieldViewModel) {
@@ -299,18 +324,11 @@ public class EditObservationFragment extends AbstractFragment implements BackPre
.observe(this, __ -> onShowPhotoSelectorDialog(fieldViewModel.getField()));
}
- private void observePhotoAdded(PhotoFieldViewModel fieldViewModel) {
+ private void observePhotoResults(PhotoFieldViewModel fieldViewModel) {
viewModel
- .getPhotoFieldUpdates()
- .observe(
- this,
- map -> {
- // TODO: Do not set response if already handled.
- Field field = fieldViewModel.getField();
- if (map.containsKey(field)) {
- fieldViewModel.updateResponse(map.get(field));
- }
- });
+ .getLastPhotoResult()
+ .as(autoDisposable(getViewLifecycleOwner()))
+ .subscribe(fieldViewModel::onPhotoResult);
}
private void onShowPhotoSelectorDialog(Field field) {
@@ -378,16 +396,34 @@ public class EditObservationFragment extends AbstractFragment implements BackPre
private void onSelectPhotoClick(int type, Field field) {
switch (type) {
case PHOTO_SOURCE_CAMERA:
- viewModel.showPhotoCapture(field);
+ viewModel
+ .obtainCapturePhotoPermissions()
+ .as(autoDisposable(getViewLifecycleOwner()))
+ .subscribe(() -> launchPhotoCapture(field.getId()));
break;
case PHOTO_SOURCE_STORAGE:
- viewModel.showPhotoSelector(field);
+ viewModel
+ .obtainSelectPhotoPermissions()
+ .as(autoDisposable(getViewLifecycleOwner()))
+ .subscribe(() -> launchPhotoSelector(field.getId()));
break;
default:
throw new IllegalArgumentException("Unknown type: " + type);
}
}
+ private void launchPhotoCapture(String fieldId) {
+ viewModel.setFieldWaitingForPhoto(fieldId);
+ capturePhotoLauncher.launch(null);
+ Timber.d("Capture photo intent sent");
+ }
+
+ private void launchPhotoSelector(String fieldId) {
+ viewModel.setFieldWaitingForPhoto(fieldId);
+ selectPhotoLauncher.launch("image/*");
+ Timber.d("Select photo intent sent");
+ }
+
@Override
public boolean onBack() {
if (viewModel.hasUnsavedChanges()) {
diff --git a/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java b/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java
index 08ee9c516..f890a7076 100644
--- a/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java
+++ b/gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java
@@ -16,11 +16,14 @@
package com.google.android.gnd.ui.editobservation;
+import static android.Manifest.permission.CAMERA;
+import static android.Manifest.permission.READ_EXTERNAL_STORAGE;
+import static android.Manifest.permission.WRITE_EXTERNAL_STORAGE;
import static androidx.lifecycle.LiveDataReactiveStreams.fromPublisher;
-import static com.google.android.gnd.persistence.remote.firestore.FirestoreStorageManager.getRemoteMediaPath;
-import static com.google.common.base.Preconditions.checkNotNull;
import android.content.res.Resources;
+import android.graphics.Bitmap;
+import android.net.Uri;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import com.google.android.gnd.R;
@@ -33,22 +36,23 @@ import com.google.android.gnd.model.observation.Response;
import com.google.android.gnd.model.observation.ResponseDelta;
import com.google.android.gnd.model.observation.ResponseMap;
import com.google.android.gnd.repository.ObservationRepository;
-import com.google.android.gnd.repository.UserMediaRepository;
import com.google.android.gnd.rx.Nil;
import com.google.android.gnd.rx.annotations.Cold;
import com.google.android.gnd.rx.annotations.Hot;
-import com.google.android.gnd.system.CameraManager;
-import com.google.android.gnd.system.StorageManager;
+import com.google.android.gnd.system.PermissionsManager;
import com.google.android.gnd.ui.common.AbstractViewModel;
+import com.google.android.gnd.ui.util.BitmapUtil;
+import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
import io.reactivex.Completable;
import io.reactivex.Observable;
import io.reactivex.Single;
import io.reactivex.processors.BehaviorProcessor;
import io.reactivex.processors.FlowableProcessor;
import io.reactivex.processors.PublishProcessor;
-import java.io.File;
+import io.reactivex.subjects.BehaviorSubject;
+import io.reactivex.subjects.Subject;
+import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
@@ -63,9 +67,8 @@ public class EditObservationViewModel extends AbstractViewModel {
private final ObservationRepository observationRepository;
private final Resources resources;
- private final UserMediaRepository userMediaRepository;
- private final StorageManager storageManager;
- private final CameraManager cameraManager;
+ private final PermissionsManager permissionsManager;
+ private final BitmapUtil bitmapUtil;
// States.
@@ -101,6 +104,13 @@ public class EditObservationViewModel extends AbstractViewModel {
/** True if the observation is being added, false if editing an existing one. */
private boolean isNew;
+ /**
+ * Emits the last photo field id updated and either its photo result, or empty if removed. The
+ * last value is emitted on each subscription because {@see #onPhotoResult} is called before
+ * subscribers are created.
+ */
+ private Subject<PhotoResult> lastPhotoResult = BehaviorSubject.create();
+
// Events.
/** "Save" button clicks. */
@@ -109,22 +119,18 @@ public class EditObservationViewModel extends AbstractViewModel {
/** Outcome of user clicking "Save". */
private final Observable<SaveResult> saveResults;
- /** Stream of updates to photo fields. */
- @Hot(replays = true)
- private final MutableLiveData<ImmutableMap<Field, String>> photoUpdates = new MutableLiveData<>();
+ @Nullable private String fieldWaitingForPhoto;
@Inject
EditObservationViewModel(
Resources resources,
ObservationRepository observationRepository,
- UserMediaRepository userMediaRepository,
- StorageManager storageManager,
- CameraManager cameraManager) {
+ PermissionsManager permissionsManager,
+ BitmapUtil bitmapUtil) {
this.resources = resources;
this.observationRepository = observationRepository;
- this.userMediaRepository = userMediaRepository;
- this.storageManager = storageManager;
- this.cameraManager = cameraManager;
+ this.permissionsManager = permissionsManager;
+ this.bitmapUtil = bitmapUtil;
this.form = fromPublisher(viewArgs.switchMapSingle(this::onInitialize));
this.saveResults = saveClicks.toObservable().switchMapSingle(__ -> onSave());
}
@@ -145,6 +151,14 @@ public class EditObservationViewModel extends AbstractViewModel {
return saveResults;
}
+ public @Nullable String getProjectId() {
+ return originalObservation == null ? null : originalObservation.getProject().getId();
+ }
+
+ public @Nullable String getObservationId() {
+ return originalObservation == null ? null : originalObservation.getId();
+ }
+
void initialize(EditObservationFragmentArgs args) {
viewArgs.onNext(args);
}
@@ -162,55 +176,16 @@ public class EditObservationViewModel extends AbstractViewModel {
r -> responses.put(field.getId(), r), () -> responses.remove(field.getId()));
}
- public void showPhotoSelector(Field field) {
- /*
- * Didn't subscribe this with Fragment's lifecycle because we need to retain the disposable
- * after the fragment is destroyed (for activity result)
- */
- // TODO: launch intent through fragment and handle activity result callbacks async
- disposeOnClear(
- storageManager
- .selectPhoto()
- .doOnError(Timber::e) // TODO(#726): Display as a toast
- .map(bitmap -> userMediaRepository.savePhoto(bitmap, field))
- .flatMapCompletable(file -> onPhotoSaved(field, file))
- .subscribe());
- }
-
- public void showPhotoCapture(Field field) {
- File imageFile = userMediaRepository.createImageFile(field);
-
- /*
- * Didn't subscribe this with Fragment's lifecycle because we need to retain the disposable
- * after the fragment is destroyed (for activity result)
- */
- // TODO: launch intent through fragment and handle activity result callbacks async
- disposeOnClear(
- cameraManager
- .capturePhoto(imageFile)
- .doOnError(Timber::e) // TODO(#726): Display as a toast
- .flatMapCompletable(__ -> onPhotoSaved(field, imageFile))
- .subscribe());
- }
-
@Cold
- private Completable onPhotoSaved(Field field, File imageFile) {
- return Completable.fromAction(
- () -> {
- String filename = imageFile.getName();
-
- // Add image to gallery
- userMediaRepository.addImageToGallery(imageFile.getAbsolutePath(), filename);
-
- // Update response
- checkNotNull(originalObservation);
- String remoteDestinationPath = getRemoteMediaPath(originalObservation, filename);
- photoUpdates.postValue(ImmutableMap.of(field, remoteDestinationPath));
- });
+ public Completable obtainCapturePhotoPermissions() {
+ return permissionsManager
+ .obtainPermission(WRITE_EXTERNAL_STORAGE)
+ .andThen(permissionsManager.obtainPermission(CAMERA));
}
- LiveData<ImmutableMap<Field, String>> getPhotoFieldUpdates() {
- return photoUpdates;
+ @Cold
+ public Completable obtainSelectPhotoPermissions() {
+ return permissionsManager.obtainPermission(READ_EXTERNAL_STORAGE);
}
public void onSaveClick(Map<String, String> validationErrors) {
@@ -336,9 +311,53 @@ public class EditObservationViewModel extends AbstractViewModel {
}
public Serializable getDraftResponses() {
- HashMap<String, Response> hashMap = new HashMap<>();
- hashMap.putAll(responses);
- return hashMap;
+ return new HashMap<>(responses);
+ }
+
+ @Nullable
+ public String getFieldWaitingForPhoto() {
+ return fieldWaitingForPhoto;
+ }
+
+ public void setFieldWaitingForPhoto(@Nullable String fieldWaitingForPhoto) {
+ this.fieldWaitingForPhoto = fieldWaitingForPhoto;
+ }
+
+ public Observable<PhotoResult> getLastPhotoResult() {
+ return lastPhotoResult;
+ }
+
+ public void onSelectPhotoResult(Uri uri) {
+ if (uri == null) {
+ Timber.e("onSelectPhotoResult called with null uri");
+ return;
+ }
+ try {
+ Bitmap bitmap = bitmapUtil.fromUri(uri);
+ onPhotoResult(bitmap);
+ Timber.v("Select photo result returned");
+ } catch (IOException e) {
+ Timber.e(e, "Error getting photo returned by camera");
+ }
+ }
+
+ public void onCapturePhotoResult(Bitmap thumbnail) {
+ if (thumbnail == null) {
+ Timber.e("onCapturePhotoResult called with null thumbnail");
+ return;
+ }
+ onPhotoResult(thumbnail);
+ Timber.v("Photo capture result returned");
+ }
+
+ private void onPhotoResult(Bitmap bitmap) {
+ if (fieldWaitingForPhoto == null) {
+ Timber.e("Photo received but no field waiting for result");
+ return;
+ }
+ String fieldId = fieldWaitingForPhoto;
+ fieldWaitingForPhoto = null;
+ lastPhotoResult.onNext(PhotoResult.create(fieldId, Optional.of(bitmap)));
}
/** Possible outcomes of user clicking "Save". */
@@ -347,4 +366,15 @@ public class EditObservationViewModel extends AbstractViewModel {
NO_CHANGES_TO_SAVE,
SAVED
}
+
+ @AutoValue
+ abstract static class PhotoResult {
+ abstract String getFieldId();
+
+ abstract Optional<Bitmap> getBitmap();
+
+ static PhotoResult create(String fieldId, Optional<Bitmap> bitmap) {
+ return new AutoValue_EditObservationViewModel_PhotoResult(fieldId, bitmap);
+ }
+ }
}
diff --git a/gnd/src/main/java/com/google/android/gnd/ui/editobservation/PhotoFieldViewModel.java b/gnd/src/main/java/com/google/android/gnd/ui/editobservation/PhotoFieldViewModel.java
index ce93bd9f4..1e6183afc 100644
--- a/gnd/src/main/java/com/google/android/gnd/ui/editobservation/PhotoFieldViewModel.java
+++ b/gnd/src/main/java/com/google/android/gnd/ui/editobservation/PhotoFieldViewModel.java
@@ -16,6 +16,8 @@
package com.google.android.gnd.ui.editobservation;
+import static com.google.android.gnd.persistence.remote.firestore.FirestoreStorageManager.getRemoteMediaPath;
+
import android.content.res.Resources;
import android.net.Uri;
import androidx.lifecycle.LiveData;
@@ -25,13 +27,23 @@ import com.google.android.gnd.model.form.Field;
import com.google.android.gnd.model.observation.TextResponse;
import com.google.android.gnd.repository.UserMediaRepository;
import com.google.android.gnd.rx.annotations.Hot;
+import com.google.android.gnd.ui.editobservation.EditObservationViewModel.PhotoResult;
+import java.io.File;
+import java.io.IOException;
+import javax.annotation.Nullable;
import javax.inject.Inject;
+import timber.log.Timber;
public class PhotoFieldViewModel extends AbstractFieldViewModel {
+ private final UserMediaRepository userMediaRepository;
+
private final LiveData<Uri> uri;
private final LiveData<Boolean> photoPresent;
+ @Nullable private String projectId;
+ @Nullable private String observationId;
+
@Hot(replays = true)
private final MutableLiveData<Field> showDialogClicks = new MutableLiveData<>();
@@ -41,6 +53,7 @@ public class PhotoFieldViewModel extends AbstractFieldViewModel {
@Inject
PhotoFieldViewModel(UserMediaRepository userMediaRepository, Resources resources) {
super(resources);
+ this.userMediaRepository = userMediaRepository;
this.photoPresent =
LiveDataReactiveStreams.fromPublisher(
getDetailsTextFlowable().map(path -> !path.isEmpty()));
@@ -76,4 +89,39 @@ public class PhotoFieldViewModel extends AbstractFieldViewModel {
public void updateResponse(String value) {
setResponse(TextResponse.fromString(value));
}
+
+ public void setProjectId(@Nullable String projectId) {
+ this.projectId = projectId;
+ }
+
+ public void setObservationId(@Nullable String observationId) {
+ this.observationId = observationId;
+ }
+
+ public void onPhotoResult(PhotoResult photoResult) throws IOException {
+ if (projectId == null || observationId == null) {
+ Timber.e("projectId or observationId not set");
+ return;
+ }
+ if (!photoResult.getFieldId().equals(getField().getId())) {
+ // Update belongs to another field.
+ return;
+ }
+ if (photoResult.getBitmap().isEmpty()) {
+ clearResponse();
+ Timber.v("Photo cleared");
+ return;
+ }
+ File imageFile =
+ userMediaRepository.savePhoto(photoResult.getBitmap().get(), photoResult.getFieldId());
+ String filename = imageFile.getName();
+ String path = imageFile.getAbsolutePath();
+
+ // Add image to gallery.
+ userMediaRepository.addImageToGallery(path, filename);
+
+ // Update response.
+ String remoteDestinationPath = getRemoteMediaPath(projectId, observationId, filename);
+ updateResponse(remoteDestinationPath);
+ }
}
diff --git a/gnd/src/test/java/com/google/android/gnd/system/CameraManagerTest.java b/gnd/src/test/java/com/google/android/gnd/system/CameraManagerTest.java
deleted file mode 100644
index c77ce9371..000000000
--- a/gnd/src/test/java/com/google/android/gnd/system/CameraManagerTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2021 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.google.android.gnd.system;
-
-import android.Manifest.permission;
-import android.app.Activity;
-import com.google.android.gnd.BaseHiltTest;
-import com.google.android.gnd.rx.Nil;
-import com.google.android.gnd.system.PermissionsManager.PermissionDeniedException;
-import dagger.hilt.android.testing.HiltAndroidTest;
-import io.reactivex.observers.TestObserver;
-import java.io.File;
-import javax.inject.Inject;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.robolectric.RobolectricTestRunner;
-
-@HiltAndroidTest
-@RunWith(RobolectricTestRunner.class)
-public class CameraManagerTest extends BaseHiltTest {
-
- private static final int REQUEST_CODE = CameraManager.CAPTURE_PHOTO_REQUEST_CODE;
- private static final File TEST_FILE = new File("foo/dir");
-
- @Inject ActivityStreams activityStreams;
- @Inject CameraManager cameraManager;
- @Inject TestPermissionUtil permissionUtil;
-
- private void mockPermissions(boolean allow) {
- String[] permissions = {permission.WRITE_EXTERNAL_STORAGE, permission.CAMERA};
- permissionUtil.setPermission(permissions, allow);
- }
-
- @Test
- public void testLaunchPhotoCapture_whenPermissionGranted() {
- TestObserver<Nil> testObserver = cameraManager.capturePhoto(TEST_FILE).test();
- mockPermissions(true);
- testObserver.assertNoErrors();
- }
-
- @Test
- public void testLaunchPhotoCapture_whenPermissionDenied() {
- TestObserver<Nil> testObserver = cameraManager.capturePhoto(TEST_FILE).test();
- mockPermissions(false);
- testObserver.assertError(PermissionDeniedException.class);
- }
-
- @Test
- public void testCapturePhotoResult_requestCancelled() {
- TestObserver<Nil> subscriber = cameraManager.capturePhotoResult().test();
- activityStreams.onActivityResult(REQUEST_CODE, Activity.RESULT_CANCELED, null);
- subscriber.assertResult();
- }
-} | ['gnd/src/main/java/com/google/android/gnd/repository/UserMediaRepository.java', 'gnd/src/test/java/com/google/android/gnd/system/CameraManagerTest.java', 'gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationViewModel.java', 'gnd/src/main/java/com/google/android/gnd/ui/editobservation/EditObservationFragment.java', 'gnd/src/main/java/com/google/android/gnd/persistence/remote/firestore/FirestoreStorageManager.java', 'gnd/src/main/java/com/google/android/gnd/system/StorageManager.java', 'gnd/src/main/java/com/google/android/gnd/system/CameraManager.java', 'gnd/src/main/java/com/google/android/gnd/ui/editobservation/PhotoFieldViewModel.java'] | {'.java': 8} | 8 | 8 | 0 | 0 | 8 | 948,389 | 195,608 | 27,801 | 309 | 16,975 | 3,295 | 416 | 7 | 579 | 92 | 122 | 17 | 0 | 0 | 1970-01-01T00:27:17 | 191 | Kotlin | {'Kotlin': 1134095, 'Java': 81131, 'Python': 3111} | Apache License 2.0 |
782 | camel-tooling/camel-idea-plugin/177/169 | camel-tooling | camel-idea-plugin | https://github.com/camel-tooling/camel-idea-plugin/issues/169 | https://github.com/camel-tooling/camel-idea-plugin/pull/177 | https://github.com/camel-tooling/camel-idea-plugin/pull/177 | 2 | fix | Realtime Validation : getIndex match the position in the string, but not the position of the caret. | Since Camel Catelog have no knowledge about the caret position it report index based on it's location in the String.
But it could be nice if it was possible to call thecatalogService.validateSimpleExpression with an option to marker the position in the error message so we can replace the position with the editor caret position.
> Unknown function: romuteId at location \\<caret\\>33\\</caret\\> Response from Camel CDI on route ${romuteId} using thread: ${threadName}
| 33639adb35a34fe49cebcd8ba44e2a9a70671744 | 33c3cea1b1a1fda9bba2bde315569f467b9cc2e0 | https://github.com/camel-tooling/camel-idea-plugin/compare/33639adb35a34fe49cebcd8ba44e2a9a70671744...33c3cea1b1a1fda9bba2bde315569f467b9cc2e0 | diff --git a/camel-idea-plugin/src/main/java/org/apache/camel/idea/annotator/CamelSimpleAnnotator.java b/camel-idea-plugin/src/main/java/org/apache/camel/idea/annotator/CamelSimpleAnnotator.java
index be0ee93..6188f4e 100644
--- a/camel-idea-plugin/src/main/java/org/apache/camel/idea/annotator/CamelSimpleAnnotator.java
+++ b/camel-idea-plugin/src/main/java/org/apache/camel/idea/annotator/CamelSimpleAnnotator.java
@@ -27,6 +27,7 @@ import org.apache.camel.idea.service.CamelCatalogService;
import org.apache.camel.idea.service.CamelPreferenceService;
import org.apache.camel.idea.service.CamelService;
import org.apache.camel.idea.util.CamelIdeaUtils;
+import org.apache.camel.idea.util.IdeaUtils;
import org.jetbrains.annotations.NotNull;
import static org.apache.camel.idea.util.CamelIdeaUtils.isCameSimpleExpressionUsedAsPredicate;
@@ -49,7 +50,7 @@ public class CamelSimpleAnnotator extends AbstractCamelAnnotator {
*/
void validateText(@NotNull PsiElement element, @NotNull AnnotationHolder holder, @NotNull String text) {
boolean hasSimple = text.contains("${") || text.contains("$simple{");
- if (hasSimple && CamelIdeaUtils.isCamelSimpleExpression(element)) {
+ if (hasSimple || CamelIdeaUtils.isCamelSimpleExpression(element)) {
CamelCatalog catalogService = ServiceManager.getService(element.getProject(), CamelCatalogService.class).get();
CamelService camelService = ServiceManager.getService(element.getProject(), CamelService.class);
@@ -58,10 +59,21 @@ public class CamelSimpleAnnotator extends AbstractCamelAnnotator {
ClassLoader loader = camelService.getCamelCoreClassloader();
if (loader != null) {
SimpleValidationResult result;
+
+ int correctEndOffsetMinusOneOff = 2;
+ int correctStartOffsetMinusOneOff = 1;
+
+ if (IdeaUtils.isXmlLanguage(element)) {
+ // the xml text range is one off compare to java text range
+ correctEndOffsetMinusOneOff = 1;
+ correctStartOffsetMinusOneOff = 0;
+ }
+
boolean predicate = isCameSimpleExpressionUsedAsPredicate(element);
if (predicate) {
LOG.debug("Validate simple predicate: " + text);
result = catalogService.validateSimplePredicate(loader, text);
+ correctEndOffsetMinusOneOff = 1; // the result for predicate index is minus one off compare to simple expression
} else {
LOG.debug("Validate simple expression: " + text);
result = catalogService.validateSimpleExpression(loader, text);
@@ -70,8 +82,17 @@ public class CamelSimpleAnnotator extends AbstractCamelAnnotator {
String error = result.getShortError();
TextRange range = element.getTextRange();
if (result.getIndex() > 0) {
- // use -1 to skip the last quote sign
- range = TextRange.create(range.getStartOffset() + result.getIndex(), range.getEndOffset() - 1);
+ //we need to calculate the correct start and end position to be sure we highlight the correct word
+ int startIdx = result.getIndex();
+ //test if the simple expression is closed correctly
+ int endIdx = text.indexOf("}", startIdx);
+ if (endIdx == -1) {
+ //the expression is not closed, test for first " " to see if can stop text range here
+ endIdx = text.indexOf(" ", startIdx);
+ }
+ //calc the end index for highlighted word
+ endIdx = endIdx == -1 ? (range.getEndOffset() - 1) : (range.getStartOffset() + endIdx) + correctEndOffsetMinusOneOff;
+ range = TextRange.create(range.getStartOffset() + result.getIndex() + correctStartOffsetMinusOneOff, endIdx);
}
holder.createErrorAnnotation(range, error);
}
diff --git a/camel-idea-plugin/src/main/java/org/apache/camel/idea/util/CamelIdeaUtils.java b/camel-idea-plugin/src/main/java/org/apache/camel/idea/util/CamelIdeaUtils.java
index ee05f28..ab7f0a8 100644
--- a/camel-idea-plugin/src/main/java/org/apache/camel/idea/util/CamelIdeaUtils.java
+++ b/camel-idea-plugin/src/main/java/org/apache/camel/idea/util/CamelIdeaUtils.java
@@ -168,7 +168,7 @@ public final class CamelIdeaUtils {
element = element.getParent();
}
if (element instanceof PsiMethodCallExpression) {
- exp = (PsiMethodCallExpression) element;
+ exp = PsiTreeUtil.getParentOfType(element.getParent(), PsiMethodCallExpression.class);
}
}
if (exp instanceof PsiMethodCallExpression) {
diff --git a/camel-idea-plugin/src/test/java/org/apache/camel/idea/CamelLightCodeInsightFixtureTestCaseIT.java b/camel-idea-plugin/src/test/java/org/apache/camel/idea/CamelLightCodeInsightFixtureTestCaseIT.java
index f786f91..7bf632b 100644
--- a/camel-idea-plugin/src/test/java/org/apache/camel/idea/CamelLightCodeInsightFixtureTestCaseIT.java
+++ b/camel-idea-plugin/src/test/java/org/apache/camel/idea/CamelLightCodeInsightFixtureTestCaseIT.java
@@ -19,10 +19,17 @@ package org.apache.camel.idea;
import java.io.File;
import java.io.IOException;
import com.intellij.openapi.components.ServiceManager;
+import com.intellij.openapi.module.Module;
+import com.intellij.openapi.roots.ContentEntry;
+import com.intellij.openapi.roots.ModifiableRootModel;
+import com.intellij.testFramework.LightProjectDescriptor;
+import com.intellij.testFramework.PsiTestUtil;
+import com.intellij.testFramework.fixtures.DefaultLightProjectDescriptor;
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase;
import org.apache.camel.idea.service.CamelCatalogService;
import org.apache.camel.idea.service.CamelService;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
+import org.jetbrains.annotations.NotNull;
/**
@@ -31,6 +38,17 @@ import org.jboss.shrinkwrap.resolver.api.maven.Maven;
*/
public abstract class CamelLightCodeInsightFixtureTestCaseIT extends LightCodeInsightFixtureTestCase {
+ public static final String CAMEL_CORE_MAVEN_ARTIFACT = "org.apache.camel:camel-core:2.19.0-SNAPSHOT";
+ private static File[] mavenArtifacts;
+ private boolean ignoreCamelCoreLib;
+
+ static {
+ try {
+ mavenArtifacts = getMavenArtifacts(CAMEL_CORE_MAVEN_ARTIFACT);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
@Override
protected void setUp() throws Exception {
super.setUp();
@@ -39,6 +57,19 @@ public abstract class CamelLightCodeInsightFixtureTestCaseIT extends LightCodeIn
ServiceManager.getService(myModule.getProject(), CamelService.class).setCamelPresent(true);
}
+ @Override
+ protected LightProjectDescriptor getProjectDescriptor() {
+
+ return new DefaultLightProjectDescriptor() {
+ @Override
+ public void configureModule(@NotNull Module module, @NotNull ModifiableRootModel model, @NotNull ContentEntry contentEntry) {
+ super.configureModule(module, model, contentEntry);
+ if (!isIgnoreCamelCoreLib()) {
+ PsiTestUtil.addLibrary(module, model, "Maven: " + CAMEL_CORE_MAVEN_ARTIFACT, mavenArtifacts[0].getParent(), mavenArtifacts[0].getName());
+ }
+ }
+ };
+ }
@Override
protected String getTestDataPath() {
return "src/test/resources/testData/";
@@ -53,8 +84,19 @@ public abstract class CamelLightCodeInsightFixtureTestCaseIT extends LightCodeIn
* @return Array of artifact files
* @throws IOException
*/
- protected File[] getMavenArtifacts(String... mavneAritfiact) throws IOException {
- File[] libs = Maven.resolver().loadPomFromFile("pom.xml").resolve(mavneAritfiact).withoutTransitivity().asFile();
+ protected static File[] getMavenArtifacts(String... mavneAritfiact) throws IOException {
+ File[] libs = Maven.resolver().loadPomFromFile("pom.xml")
+ .resolve(mavneAritfiact)
+ .withoutTransitivity().asFile();
+
return libs;
}
+
+ public boolean isIgnoreCamelCoreLib() {
+ return this.ignoreCamelCoreLib;
+ }
+
+ public void setIgnoreCamelCoreLib(boolean ignoreCamelCoreLib) {
+ this.ignoreCamelCoreLib = ignoreCamelCoreLib;
+ }
}
diff --git a/camel-idea-plugin/src/test/java/org/apache/camel/idea/annotator/CamelSimpleAnnotatorTestIT.java b/camel-idea-plugin/src/test/java/org/apache/camel/idea/annotator/CamelSimpleAnnotatorTestIT.java
index 5c2783d..2b5ecd4 100644
--- a/camel-idea-plugin/src/test/java/org/apache/camel/idea/annotator/CamelSimpleAnnotatorTestIT.java
+++ b/camel-idea-plugin/src/test/java/org/apache/camel/idea/annotator/CamelSimpleAnnotatorTestIT.java
@@ -16,17 +16,6 @@
*/
package org.apache.camel.idea.annotator;
-import java.io.File;
-
-import com.intellij.openapi.application.ApplicationManager;
-import com.intellij.openapi.roots.ModuleRootModificationUtil;
-import com.intellij.openapi.roots.OrderRootType;
-import com.intellij.openapi.roots.libraries.Library;
-import com.intellij.openapi.roots.libraries.LibraryTable;
-import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
-import com.intellij.openapi.vfs.LocalFileSystem;
-import com.intellij.openapi.vfs.VirtualFile;
-import com.intellij.util.ui.UIUtil;
import org.apache.camel.idea.CamelLightCodeInsightFixtureTestCaseIT;
@@ -35,25 +24,16 @@ import org.apache.camel.idea.CamelLightCodeInsightFixtureTestCaseIT;
* TIP : Writing highlighting test can be tricky because if the highlight is one character off
* it will fail, but the error messaged might still be correct. In this case it's likely the TextRange
* is incorrect.
+ *
+ * So far we can have been able to avoid pointing the -Didea.home.path=<location of Intellij CI source code>
+ * because it's didn't really matter it could not resolve JDK classes when testing highlight. If you need
+ * to resolve the JDK classes you will have to point the idea.home.path to the right location
*/
public class CamelSimpleAnnotatorTestIT extends CamelLightCodeInsightFixtureTestCaseIT {
- public static final String CAMEL_CORE_MAVEN_ARTIFACT = "org.apache.camel:camel-core:2.19.0-SNAPSHOT";
-
@Override
- protected void setUp() throws Exception {
- super.setUp();
- File[] mavenArtifacts = getMavenArtifacts(CAMEL_CORE_MAVEN_ARTIFACT);
- VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(mavenArtifacts[0]);
- final LibraryTable projectLibraryTable = LibraryTablesRegistrar.getInstance().getLibraryTable(myModule.getProject());
- ApplicationManager.getApplication().runWriteAction(() -> {
- Library library = projectLibraryTable.createLibrary("Maven: " + CAMEL_CORE_MAVEN_ARTIFACT);
- final Library.ModifiableModel libraryModifiableModel = library.getModifiableModel();
- libraryModifiableModel.addRoot(virtualFile, OrderRootType.CLASSES);
- libraryModifiableModel.commit();
- ModuleRootModificationUtil.addDependency(myModule, library);
- });
- UIUtil.dispatchAllInvocationEvents();
+ protected String getTestDataPath() {
+ return "src/test/resources/testData/annotator";
}
public void testAnnotatorSimpleValidation() {
@@ -61,6 +41,41 @@ public class CamelSimpleAnnotatorTestIT extends CamelLightCodeInsightFixtureTest
myFixture.checkHighlighting(false, false, true, true);
}
+ public void testAnnotatorExpressionValidation() {
+ myFixture.configureByText("AnnotatorTestData.java", getJavaWithExpression());
+ myFixture.checkHighlighting(false, false, true, true);
+ }
+
+ public void testAnnotatorOpenBracketSimpleValidation() {
+ myFixture.configureByText("AnnotatorTestData.java", getJavaOpenBracketWithSimple());
+ myFixture.checkHighlighting(false, false, true, true);
+ }
+
+ public void testAnnotatorMultipleOpenBracketSimpleValidation() {
+ myFixture.configureByText("AnnotatorTestData.java", getJavaMutlipleOpenBracketWithSimple());
+ myFixture.checkHighlighting(false, false, true, true);
+ }
+
+ public void testAnnotatorCamelPredicateValidation() {
+ myFixture.configureByText("AnnotatorTestData.java", getJavaWithCamelPredicate());
+ myFixture.checkHighlighting(false, false, false, true);
+ }
+
+ public void testAnnotatorCamelPredicateValidation2() {
+ myFixture.configureByText("AnnotatorTestData.java", getJavaWithCamelPredicate2());
+ myFixture.checkHighlighting(false, false, false, true);
+ }
+
+ public void testXmlAnnotatorSimpleValidation2() {
+ myFixture.configureByText("AnnotatorTestData.xml", getXmlWithSimple());
+ myFixture.checkHighlighting(false, false, false, true);
+ }
+
+ public void testXmlAnnotatorPredicateValidation2() {
+ myFixture.configureByText("AnnotatorTestData.xml", getXmlWithPredicate());
+ myFixture.checkHighlighting(false, false, false, true);
+ }
+
private String getJavaWithSimple() {
return "import org.apache.camel.builder.RouteBuilder;\\n"
+ "public class MyRouteBuilder extends RouteBuilder {\\n"
@@ -68,8 +83,98 @@ public class CamelSimpleAnnotatorTestIT extends CamelLightCodeInsightFixtureTest
+ " from(\\"netty-http:http://localhost/cdi?matchOnUriPrefix=true&nettySharedHttpServer=#httpServer\\")\\n"
+ " .id(\\"http-route-cdi\\")\\n"
+ " .transform()\\n"
- + " .simple(\\"Response from Camel CDI on route<error descr=\\"Unknown function: xrouteId\\"> ${xrouteId} using thread: ${threadName}</error>\\");"
+ + " .simple(\\"Response from Camel CDI on route<error descr=\\"Unknown function: xrouteId\\">${xrouteId}</error> using thread: ${threadName}\\");"
+ + " }\\n"
+ + " }";
+ }
+
+ private String getJavaWithExpression() {
+ return "import org.apache.camel.builder.RouteBuilder;"
+ + "public class MyRouteBuilder extends RouteBuilder {\\n"
+ + " public void configure() throws Exception {\\n"
+ + " from(\\"timer:stream?repeatCount=1\\")\\n"
+ + " .log(\\"Result from query <error descr=\\"Unknown function: xbody\\">${xbody}</error>\\")\\n"
+ + " .process(exchange -> {\\n"
+ + " exchange.getIn().setBody(Arrays.asList(\\"fharms\\"));\\n"
+ + " .to(\\"file:test.txt\\");\\n"
+ + " }"
+ + " }";
+ }
+
+ private String getJavaOpenBracketWithSimple() {
+ return "import org.apache.camel.builder.RouteBuilder;\\n"
+ + "public class MyRouteBuilder extends RouteBuilder {\\n"
+ + " public void configure() throws Exception {\\n"
+ + " from(\\"netty-http:http://localhost/cdi?matchOnUriPrefix=true&nettySharedHttpServer=#httpServer\\")\\n"
+ + " .id(\\"http-route-cdi\\")\\n"
+ + " .transform()\\n"
+ + " .simple(\\"Response from Camel CDI on route${routeId} using thread: ${threadNam<error descr=\\"expected symbol functionEnd but was eol\\">e</error>\\");"
+ + " }\\n"
+ + " }";
+ }
+
+ private String getJavaMutlipleOpenBracketWithSimple() {
+ return "import org.apache.camel.builder.RouteBuilder;\\n"
+ + "public class MyRouteBuilder extends RouteBuilder {\\n"
+ + " public void configure() throws Exception {\\n"
+ + " from(\\"netty-http:http://localhost/cdi?matchOnUriPrefix=true&nettySharedHttpServer=#httpServer\\")\\n"
+ + " .id(\\"http-route-cdi\\")\\n"
+ + " .transform()\\n"
+ + " .simple(\\"Response from Camel CDI on route${routeId} using thread: ${threadNam<error descr=\\"expected symbol functionEnd but was eol\\">e</error>\\");"
+ + " }\\n"
+ + " }";
+ }
+
+ private String getJavaWithCamelPredicate() {
+ return "import org.apache.camel.builder.RouteBuilder;\\n"
+ + "public class MyRouteBuilder extends RouteBuilder {\\n"
+ + " public void configure() throws Exception {\\n"
+ + " from(\\"direct:start\\")\\n"
+ + " .loopDoWhile(simple(\\"${body.length} <error descr=\\"Unexpected token =\\">=!=</error> 12\\"))\\n"
+ + " .to(\\"mock:loop\\")\\n"
+ + " .transform(body().append(\\"A\\"))\\n"
+ + " .end()\\n"
+ + " .to(\\"mock:result\\");"
+ " }\\n"
+ " }";
}
+
+ private String getJavaWithCamelPredicate2() {
+ return "import org.apache.camel.builder.RouteBuilder;\\n"
+ + "public class MyRouteBuilder extends RouteBuilder {\\n"
+ + " public void configure() throws Exception {\\n"
+ + " from(\\"direct:start\\")\\n"
+ + " .loopDoWhile(simple(\\"${body.length} != 12\\"))\\n"
+ + " .filter().simple(<error descr=\\"Unexpected token x\\">\\"xxxx\\"</error>)\\n"
+ + " .filter(simple(<error descr=\\"Unexpected token y\\">\\"yyyy\\"</error>))\\n"
+ + " .to(\\"mock:loop\\")\\n"
+ + " .transform(body().append(\\"A\\"))\\n"
+ + " .end()\\n"
+ + " .to(\\"mock:result\\");"
+ + " }\\n"
+ + " }";
+ }
+
+ private String getXmlWithSimple() {
+ return "<camelContext xmlns=\\"http://camel.apache.org/schema/spring\\">\\n"
+ + " <route id=\\"timerToInRoute\\">\\n"
+ + " <from uri=\\"timer:foo?period=1s\\"/>\\n"
+ + " <transform>\\n"
+ + " <simple>Message at <error descr=\\"Unknown function: daxcdte:now:yyyy-MM-dd HH:mm:ss\\">${daxcdte:now:yyyy-MM-dd HH:mm:ss}</error></simple>\\n"
+ + " </transform>\\n"
+ + " <to uri=\\"activemq:queue:inbox\\"/>\\n"
+ + " </camelContext>";
+ }
+
+ private String getXmlWithPredicate() {
+ return "<from uri=\\"direct:start\\"/>\\n"
+ + " <loop doWhile=\\"true\\">\\n"
+ + " <simple>${body.length} !s1= 12</simple>\\n"
+ + " <filter/><simple><error descr=\\"Unexpected token x\\">xxxx</error></simple>\\n"
+ + " <filter>\\n"
+ + " <simple><error descr=\\"Unexpected token y\\">yyyy</error></simple>\\n"
+ + " </filter>\\n"
+ + " </loop>\\n"
+ + " <to>mock:result</to>";
+ }
}
\\ No newline at end of file
diff --git a/camel-idea-plugin/src/test/java/org/apache/camel/idea/service/CamelCatalogServiceTestIT.java b/camel-idea-plugin/src/test/java/org/apache/camel/idea/service/CamelCatalogServiceTestIT.java
index ba02b1c..86e6940 100644
--- a/camel-idea-plugin/src/test/java/org/apache/camel/idea/service/CamelCatalogServiceTestIT.java
+++ b/camel-idea-plugin/src/test/java/org/apache/camel/idea/service/CamelCatalogServiceTestIT.java
@@ -26,8 +26,10 @@ import org.apache.camel.idea.CamelLightCodeInsightFixtureTestCaseIT;
*/
public class CamelCatalogServiceTestIT extends CamelLightCodeInsightFixtureTestCaseIT {
+
@Override
protected void setUp() throws Exception {
+ setIgnoreCamelCoreLib(true);
super.setUp();
ServiceManager.getService(myModule.getProject(), CamelService.class).setCamelPresent(false);
} | ['camel-idea-plugin/src/main/java/org/apache/camel/idea/annotator/CamelSimpleAnnotator.java', 'camel-idea-plugin/src/test/java/org/apache/camel/idea/CamelLightCodeInsightFixtureTestCaseIT.java', 'camel-idea-plugin/src/main/java/org/apache/camel/idea/util/CamelIdeaUtils.java', 'camel-idea-plugin/src/test/java/org/apache/camel/idea/service/CamelCatalogServiceTestIT.java', 'camel-idea-plugin/src/test/java/org/apache/camel/idea/annotator/CamelSimpleAnnotatorTestIT.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 251,901 | 49,190 | 6,223 | 48 | 2,087 | 378 | 29 | 2 | 474 | 71 | 102 | 6 | 0 | 0 | 1970-01-01T00:24:44 | 187 | Java | {'Java': 1662284, 'Shell': 328, 'Dockerfile': 271} | Apache License 2.0 |
316 | apache/fluo/402/269 | apache | fluo | https://github.com/apache/fluo/issues/269 | https://github.com/apache/fluo/pull/402 | https://github.com/apache/fluo/pull/402 | 1 | fixes | FluoInputFormat takes properties | FluoInputFormat.configure() takes a Properties object. This is inconsistent with other methods that take a commons configuration object.
| 58c24e1f0ca0912c540b41f595b25150daf12779 | 2df6b9100a0b66bcf10fb5f1093b450e1b87c934 | https://github.com/apache/fluo/compare/58c24e1f0ca0912c540b41f595b25150daf12779...2df6b9100a0b66bcf10fb5f1093b450e1b87c934 | diff --git a/modules/mapreduce/src/main/java/io/fluo/mapreduce/FluoInputFormat.java b/modules/mapreduce/src/main/java/io/fluo/mapreduce/FluoInputFormat.java
index 39f635ee..7c58dfa7 100644
--- a/modules/mapreduce/src/main/java/io/fluo/mapreduce/FluoInputFormat.java
+++ b/modules/mapreduce/src/main/java/io/fluo/mapreduce/FluoInputFormat.java
@@ -20,7 +20,6 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Map.Entry;
-import java.util.Properties;
import io.fluo.api.config.FluoConfiguration;
import io.fluo.api.config.ScannerConfiguration;
@@ -34,6 +33,7 @@ import io.fluo.core.util.SpanUtil;
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.RangeInputSplit;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationConverter;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.mapreduce.InputFormat;
@@ -136,23 +136,23 @@ public class FluoInputFormat extends InputFormat<Bytes,ColumnIterator> {
* Configure properties needed to connect to a Fluo instance
*
* @param conf
- * @param props
+ * @param config
* use {@link io.fluo.api.config.FluoConfiguration} to configure programmatically
*/
@SuppressWarnings("deprecation")
- public static void configure(Job conf, Properties props) {
+ public static void configure(Job conf, Configuration config) {
try {
- FluoConfiguration config = new FluoConfiguration(ConfigurationConverter.getConfiguration(props));
- try (Environment env = new Environment(config)) {
+ FluoConfiguration fconfig = new FluoConfiguration(config);
+ try (Environment env = new Environment(fconfig)) {
long ts = env.getSharedResources().getTimestampTracker().allocateTimestamp();
conf.getConfiguration().setLong(TIMESTAMP_CONF_KEY, ts);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
- props.store(baos, "");
+ ConfigurationConverter.getProperties(config).store(baos, "");
conf.getConfiguration().set(PROPS_CONF_KEY, new String(baos.toByteArray(), "UTF8"));
- AccumuloInputFormat.setZooKeeperInstance(conf, config.getAccumuloInstance(), config.getAccumuloZookeepers());
- AccumuloInputFormat.setConnectorInfo(conf, config.getAccumuloUser(), new PasswordToken(config.getAccumuloPassword()));
+ AccumuloInputFormat.setZooKeeperInstance(conf, fconfig.getAccumuloInstance(), fconfig.getAccumuloZookeepers());
+ AccumuloInputFormat.setConnectorInfo(conf, fconfig.getAccumuloUser(), new PasswordToken(fconfig.getAccumuloPassword()));
AccumuloInputFormat.setInputTableName(conf, env.getTable());
AccumuloInputFormat.setScanAuthorizations(conf, env.getAuthorizations());
} | ['modules/mapreduce/src/main/java/io/fluo/mapreduce/FluoInputFormat.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 585,372 | 131,772 | 18,355 | 142 | 1,139 | 232 | 16 | 1 | 138 | 17 | 25 | 2 | 0 | 0 | 1970-01-01T00:23:41 | 181 | Java | {'Java': 1444123, 'Shell': 37149, 'Thrift': 990} | Apache License 2.0 |