id
int64 0
10.2k
| text_id
stringlengths 17
67
| repo_owner
stringclasses 232
values | repo_name
stringclasses 295
values | issue_url
stringlengths 39
89
| pull_url
stringlengths 37
87
| comment_url
stringlengths 37
94
| links_count
int64 1
2
| link_keyword
stringclasses 12
values | issue_title
stringlengths 7
197
| issue_body
stringlengths 45
21.3k
| base_sha
stringlengths 40
40
| head_sha
stringlengths 40
40
| diff_url
stringlengths 120
170
| diff
stringlengths 478
132k
| changed_files
stringlengths 47
2.6k
| changed_files_exts
stringclasses 22
values | changed_files_count
int64 1
22
| java_changed_files_count
int64 1
22
| kt_changed_files_count
int64 0
0
| py_changed_files_count
int64 0
0
| code_changed_files_count
int64 1
22
| repo_symbols_count
int64 32.6k
242M
| repo_tokens_count
int64 6.59k
49.2M
| repo_lines_count
int64 992
6.2M
| repo_files_without_tests_count
int64 12
28.1k
| changed_symbols_count
int64 0
36.1k
| changed_tokens_count
int64 0
6.5k
| changed_lines_count
int64 0
561
| changed_files_without_tests_count
int64 1
17
| issue_symbols_count
int64 45
21.3k
| issue_words_count
int64 2
1.39k
| issue_tokens_count
int64 13
4.47k
| issue_lines_count
int64 1
325
| issue_links_count
int64 0
19
| issue_code_blocks_count
int64 0
31
| pull_create_at
timestamp[s] | repo_stars
int64 10
44.3k
| repo_language
stringclasses 8
values | repo_languages
stringclasses 296
values | repo_license
stringclasses 2
values |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2,091 | smallrye/smallrye-graphql/507/506 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/506 | https://github.com/smallrye/smallrye-graphql/pull/507 | https://github.com/smallrye/smallrye-graphql/pull/507 | 1 | fix | Handling of LocalDateTime (and probably others) | As specified [here](https://download.eclipse.org/microprofile/microprofile-graphql-1.0.3/microprofile-graphql.html#scalars), the server generates, i.e. a `scalar DateTime` in the schema for a `LocalDateTime` parameter:
```
type Query {
hello(
"ISO-8601"
ping: DateTime
): String
}
"Scalar for DateTime"
scalar DateTime
```
The client must use this predefined type mapping as well. | 4b04398363fe4e16ea2b23906f98f9772ef24344 | b8c9adce51fa0ee34430c2e93a2e84c87227e15a | https://github.com/smallrye/smallrye-graphql/compare/4b04398363fe4e16ea2b23906f98f9772ef24344...b8c9adce51fa0ee34430c2e93a2e84c87227e15a | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
index a554a679..6a1ddf9a 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
@@ -3,6 +3,7 @@ package io.smallrye.graphql.client.typesafe.impl.reflection;
import java.lang.annotation.Annotation;
import java.lang.reflect.Parameter;
+import org.eclipse.microprofile.graphql.Id;
import org.eclipse.microprofile.graphql.Input;
import org.eclipse.microprofile.graphql.Name;
@@ -28,8 +29,8 @@ public class ParameterInfo {
}
public String graphQlInputTypeName() {
- if (type.isScalar()) {
- return graphQlInputTypeName(type) + optionalExclamationMark(type);
+ if (parameter.isAnnotationPresent(Id.class)) {
+ return "ID" + optionalExclamationMark(type);
} else if (type.isCollection()) {
return "[" + withExclamationMark(type.getItemType()) + "]" + optionalExclamationMark(type);
} else {
@@ -42,23 +43,45 @@ public class ParameterInfo {
}
private String graphQlInputTypeName(TypeInfo type) {
- return simpleInputTypeName(type);
- }
-
- private String simpleInputTypeName(TypeInfo type) {
if (type.isAnnotated(Input.class))
return type.getAnnotation(Input.class).value();
if (type.isAnnotated(Name.class))
return type.getAnnotation(Name.class).value();
switch (type.getSimpleName()) {
+ case "int":
+ case "Integer":
+ case "short":
+ case "Short":
+ case "byte":
+ case "Byte":
+ return "Int";
+ case "float":
+ case "Float":
+ case "double":
+ case "Double":
+ return "Float";
+ case "String":
+ case "char":
+ case "Character":
+ return "String";
case "boolean":
case "Boolean":
return "Boolean";
- case "int":
- case "Integer":
+ case "BigInteger":
case "long":
case "Long":
- return "Int";
+ return "BigInteger";
+ case "BigDecimal":
+ return "BigDecimal";
+ case "LocalDate":
+ return "Date";
+ case "LocalTime":
+ case "OffsetTime":
+ return "Time";
+ case "LocalDateTime":
+ case "OffsetDateTime":
+ case "ZonedDateTime":
+ return "DateTime";
default:
return type.getSimpleName() + (type.isScalar() || type.isEnum() ? "" : "Input");
}
diff --git a/client/implementation/src/test/java/test/unit/ParametersBehavior.java b/client/implementation/src/test/java/test/unit/ParametersBehavior.java
index bbe47e98..574b14d7 100644
--- a/client/implementation/src/test/java/test/unit/ParametersBehavior.java
+++ b/client/implementation/src/test/java/test/unit/ParametersBehavior.java
@@ -552,7 +552,7 @@ public class ParametersBehavior {
String greeting = api.greeting(123L);
- then(fixture.query()).isEqualTo("query greeting($who: Int) { greeting(who: $who) }");
+ then(fixture.query()).isEqualTo("query greeting($who: BigInteger) { greeting(who: $who) }");
then(fixture.variables()).isEqualTo("{'who':123}");
then(fixture.operationName()).isEqualTo("greeting");
then(greeting).isEqualTo("hi, foo");
@@ -570,7 +570,7 @@ public class ParametersBehavior {
String greeting = api.greeting(123L);
- then(fixture.query()).isEqualTo("query greeting($who: Int!) { greeting(who: $who) }");
+ then(fixture.query()).isEqualTo("query greeting($who: BigInteger!) { greeting(who: $who) }");
then(fixture.variables()).isEqualTo("{'who':123}");
then(fixture.operationName()).isEqualTo("greeting");
then(greeting).isEqualTo("hi, foo");
@@ -588,7 +588,7 @@ public class ParametersBehavior {
String greeting = api.greeting(123L);
- then(fixture.query()).isEqualTo("query greeting($who: Int!) { greeting(who: $who) }");
+ then(fixture.query()).isEqualTo("query greeting($who: BigInteger!) { greeting(who: $who) }");
then(fixture.variables()).isEqualTo("{'who':123}");
then(fixture.operationName()).isEqualTo("greeting");
then(greeting).isEqualTo("hi, foo");
diff --git a/client/implementation/src/test/java/test/unit/ScalarBehavior.java b/client/implementation/src/test/java/test/unit/ScalarBehavior.java
index be1493a3..71c20e8f 100644
--- a/client/implementation/src/test/java/test/unit/ScalarBehavior.java
+++ b/client/implementation/src/test/java/test/unit/ScalarBehavior.java
@@ -9,10 +9,15 @@ import java.math.BigInteger;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.OffsetDateTime;
+import java.time.OffsetTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Date;
+import org.eclipse.microprofile.graphql.Id;
+import org.eclipse.microprofile.graphql.NonNull;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
@@ -24,12 +29,12 @@ class ScalarBehavior {
@GraphQlClientApi
interface BoolApi {
- boolean bool();
+ boolean bool(boolean in);
}
@GraphQlClientApi
interface BooleanApi {
- Boolean bool();
+ Boolean bool(Boolean in);
}
@Nested
@@ -39,9 +44,9 @@ class ScalarBehavior {
fixture.returnsData("'bool':true");
BoolApi api = fixture.builder().build(BoolApi.class);
- boolean bool = api.bool();
+ boolean bool = api.bool(true);
- then(fixture.query()).isEqualTo("query bool { bool }");
+ then(fixture.query()).isEqualTo("query bool($in: Boolean!) { bool(in: $in) }");
then(bool).isTrue();
}
@@ -50,7 +55,7 @@ class ScalarBehavior {
fixture.returnsData("'bool':null");
BoolApi api = fixture.builder().build(BoolApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::bool, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.bool(true), GraphQlClientException.class);
then(thrown).hasMessage("invalid boolean value for " + BoolApi.class.getName() + "#bool: null");
}
@@ -60,7 +65,7 @@ class ScalarBehavior {
fixture.returnsData("'bool':'xxx'");
BoolApi api = fixture.builder().build(BoolApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::bool, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.bool(true), GraphQlClientException.class);
then(thrown).hasMessage("invalid boolean value for " + BoolApi.class.getName() + "#bool: \\"xxx\\"");
}
@@ -70,7 +75,7 @@ class ScalarBehavior {
fixture.returnsData("'bool':123");
BoolApi api = fixture.builder().build(BoolApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::bool, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.bool(true), GraphQlClientException.class);
then(thrown).hasMessage("invalid boolean value for " + BoolApi.class.getName() + "#bool: 123");
}
@@ -80,7 +85,7 @@ class ScalarBehavior {
fixture.returnsData("'bool':[123]");
BoolApi api = fixture.builder().build(BoolApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::bool, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.bool(true), GraphQlClientException.class);
then(thrown).hasMessage("invalid boolean value for " + BoolApi.class.getName() + "#bool: [123]");
}
@@ -90,7 +95,7 @@ class ScalarBehavior {
fixture.returnsData("'bool':{'foo':'bar'}");
BoolApi api = fixture.builder().build(BoolApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::bool, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.bool(true), GraphQlClientException.class);
then(thrown).hasMessage("invalid boolean value for " + BoolApi.class.getName() + "#bool: {\\"foo\\":\\"bar\\"}");
}
@@ -100,21 +105,21 @@ class ScalarBehavior {
fixture.returnsData("'bool':true");
BooleanApi api = fixture.builder().build(BooleanApi.class);
- Boolean bool = api.bool();
+ Boolean bool = api.bool(true);
- then(fixture.query()).isEqualTo("query bool { bool }");
+ then(fixture.query()).isEqualTo("query bool($in: Boolean) { bool(in: $in) }");
then(bool).isTrue();
}
}
@GraphQlClientApi
interface ByteApi {
- Byte code();
+ Byte code(Byte in);
}
@GraphQlClientApi
interface PrimitiveByteApi {
- byte code();
+ byte code(byte in);
}
@Nested
@@ -124,9 +129,9 @@ class ScalarBehavior {
fixture.returnsData("'code':5");
ByteApi api = fixture.builder().build(ByteApi.class);
- Byte code = api.code();
+ Byte code = api.code((byte) 1);
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: Int) { code(in: $in) }");
then(code).isEqualTo((byte) 5);
}
@@ -135,9 +140,9 @@ class ScalarBehavior {
fixture.returnsData("'code':5");
PrimitiveByteApi api = fixture.builder().build(PrimitiveByteApi.class);
- byte code = api.code();
+ byte code = api.code((byte) 1);
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: Int!) { code(in: $in) }");
then(code).isEqualTo((byte) 5);
}
@@ -147,7 +152,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooBig);
ByteApi api = fixture.builder().build(ByteApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code((byte) 1), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Byte value for " + ByteApi.class.getName() + "#code: " + tooBig);
}
@@ -158,7 +163,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooSmall);
ByteApi api = fixture.builder().build(ByteApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code((byte) 1), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Byte value for " + ByteApi.class.getName() + "#code: " + tooSmall);
}
@@ -166,12 +171,12 @@ class ScalarBehavior {
@GraphQlClientApi
interface CharacterApi {
- Character code();
+ Character code(Character in);
}
@GraphQlClientApi
interface PrimitiveCharApi {
- char code();
+ char code(char in);
}
@Nested
@@ -181,9 +186,9 @@ class ScalarBehavior {
fixture.returnsData("'code':'a'");
CharacterApi api = fixture.builder().build(CharacterApi.class);
- Character c = api.code();
+ Character c = api.code('c');
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: String) { code(in: $in) }");
then(c).isEqualTo('a');
}
@@ -192,7 +197,7 @@ class ScalarBehavior {
fixture.returnsData("'code':'ab'");
CharacterApi api = fixture.builder().build(CharacterApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code('c'), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Character value for " + CharacterApi.class.getName() + "#code: \\"ab\\"");
}
@@ -202,9 +207,9 @@ class ScalarBehavior {
fixture.returnsData("'code':97");
CharacterApi api = fixture.builder().build(CharacterApi.class);
- Character c = api.code();
+ Character c = api.code('c');
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: String) { code(in: $in) }");
then(c).isEqualTo('a');
}
@@ -214,7 +219,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooBig);
CharacterApi api = fixture.builder().build(CharacterApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code('c'), GraphQlClientException.class);
then(thrown)
.hasMessage("invalid java.lang.Character value for " + CharacterApi.class.getName() + "#code: " + tooBig);
@@ -225,7 +230,7 @@ class ScalarBehavior {
fixture.returnsData("'code':-15");
CharacterApi api = fixture.builder().build(CharacterApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code('c'), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Character value for " + CharacterApi.class.getName() + "#code: -15");
}
@@ -235,9 +240,9 @@ class ScalarBehavior {
fixture.returnsData("'code':'a'");
PrimitiveCharApi api = fixture.builder().build(PrimitiveCharApi.class);
- char c = api.code();
+ char c = api.code('c');
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: String!) { code(in: $in) }");
then(c).isEqualTo('a');
}
@@ -246,7 +251,7 @@ class ScalarBehavior {
fixture.returnsData("'code':'ab'");
PrimitiveCharApi api = fixture.builder().build(PrimitiveCharApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code('c'), GraphQlClientException.class);
then(thrown).hasMessage("invalid char value for " + PrimitiveCharApi.class.getName() + "#code: \\"ab\\"");
}
@@ -254,12 +259,12 @@ class ScalarBehavior {
@GraphQlClientApi
interface ShortApi {
- Short code();
+ Short code(Short in);
}
@GraphQlClientApi
interface PrimitiveShortApi {
- short code();
+ short code(short in);
}
@Nested
@@ -269,9 +274,9 @@ class ScalarBehavior {
fixture.returnsData("'code':5");
ShortApi api = fixture.builder().build(ShortApi.class);
- Short code = api.code();
+ Short code = api.code((short) 2);
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: Int) { code(in: $in) }");
then(code).isEqualTo((short) 5);
}
@@ -281,7 +286,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooSmall);
ShortApi api = fixture.builder().build(ShortApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code((short) 2), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Short value for " + ShortApi.class.getName() + "#code: " + tooSmall);
}
@@ -292,7 +297,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooBig);
ShortApi api = fixture.builder().build(ShortApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code((short) 2), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Short value for " + ShortApi.class.getName() + "#code: " + tooBig);
}
@@ -302,21 +307,21 @@ class ScalarBehavior {
fixture.returnsData("'code':5");
PrimitiveShortApi api = fixture.builder().build(PrimitiveShortApi.class);
- short code = api.code();
+ short code = api.code((short) 2);
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: Int!) { code(in: $in) }");
then(code).isEqualTo((short) 5);
}
}
@GraphQlClientApi
interface IntegerApi {
- Integer code();
+ Integer code(Integer in);
}
@GraphQlClientApi
interface IntApi {
- int code();
+ int code(int in);
}
@Nested
@@ -326,9 +331,9 @@ class ScalarBehavior {
fixture.returnsData("'code':5");
IntegerApi api = fixture.builder().build(IntegerApi.class);
- Integer code = api.code();
+ Integer code = api.code(3);
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: Int) { code(in: $in) }");
then(code).isEqualTo(5);
}
@@ -338,7 +343,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + number);
IntegerApi api = fixture.builder().build(IntegerApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code(3), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Integer value for " + IntegerApi.class.getName() + "#code: " + number);
}
@@ -349,7 +354,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooSmall);
IntegerApi api = fixture.builder().build(IntegerApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code(3), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Integer value for " + IntegerApi.class.getName() + "#code: " + tooSmall);
}
@@ -360,7 +365,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooBig);
IntegerApi api = fixture.builder().build(IntegerApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code(3), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Integer value for " + IntegerApi.class.getName() + "#code: " + tooBig);
}
@@ -370,21 +375,21 @@ class ScalarBehavior {
fixture.returnsData("'code':5");
IntApi api = fixture.builder().build(IntApi.class);
- int code = api.code();
+ int code = api.code(3);
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: Int!) { code(in: $in) }");
then(code).isEqualTo(5);
}
}
@GraphQlClientApi
interface LongApi {
- Long code();
+ Long code(Long in);
}
@GraphQlClientApi
interface PrimitiveLongApi {
- long code();
+ long code(long in);
}
@Nested
@@ -394,9 +399,9 @@ class ScalarBehavior {
fixture.returnsData("'code':5");
LongApi api = fixture.builder().build(LongApi.class);
- Long code = api.code();
+ Long code = api.code(7L);
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: BigInteger) { code(in: $in) }");
then(code).isEqualTo(5L);
}
@@ -406,7 +411,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooSmall);
LongApi api = fixture.builder().build(LongApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code(7L), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Long value for " + LongApi.class.getName() + "#code: " + tooSmall);
}
@@ -417,7 +422,7 @@ class ScalarBehavior {
fixture.returnsData("'code':" + tooBig);
LongApi api = fixture.builder().build(LongApi.class);
- GraphQlClientException thrown = catchThrowableOfType(api::code, GraphQlClientException.class);
+ GraphQlClientException thrown = catchThrowableOfType(() -> api.code(7L), GraphQlClientException.class);
then(thrown).hasMessage("invalid java.lang.Long value for " + LongApi.class.getName() + "#code: " + tooBig);
}
@@ -427,21 +432,21 @@ class ScalarBehavior {
fixture.returnsData("'code':5");
PrimitiveLongApi api = fixture.builder().build(PrimitiveLongApi.class);
- long code = api.code();
+ long code = api.code(7L);
- then(fixture.query()).isEqualTo("query code { code }");
+ then(fixture.query()).isEqualTo("query code($in: BigInteger!) { code(in: $in) }");
then(code).isEqualTo(5L);
}
}
@GraphQlClientApi
interface FloatApi {
- Float number();
+ Float number(Float in);
}
@GraphQlClientApi
interface PrimitiveFloatApi {
- float number();
+ float number(float in);
}
@Nested
@@ -451,9 +456,9 @@ class ScalarBehavior {
fixture.returnsData("'number':123.456");
FloatApi api = fixture.builder().build(FloatApi.class);
- Float number = api.number();
+ Float number = api.number(7.8f);
- then(fixture.query()).isEqualTo("query number { number }");
+ then(fixture.query()).isEqualTo("query number($in: Float) { number(in: $in) }");
then(number).isEqualTo(123.456f);
}
@@ -462,21 +467,21 @@ class ScalarBehavior {
fixture.returnsData("'number':123.456");
PrimitiveFloatApi api = fixture.builder().build(PrimitiveFloatApi.class);
- float number = api.number();
+ float number = api.number(7.8f);
- then(fixture.query()).isEqualTo("query number { number }");
+ then(fixture.query()).isEqualTo("query number($in: Float!) { number(in: $in) }");
then(number).isEqualTo(123.456f);
}
}
@GraphQlClientApi
interface DoubleApi {
- Double number();
+ Double number(Double in);
}
@GraphQlClientApi
interface PrimitiveDoubleApi {
- double number();
+ double number(double in);
}
@Nested
@@ -486,9 +491,9 @@ class ScalarBehavior {
fixture.returnsData("'number':123.456");
DoubleApi api = fixture.builder().build(DoubleApi.class);
- Double number = api.number();
+ Double number = api.number(4.5);
- then(fixture.query()).isEqualTo("query number { number }");
+ then(fixture.query()).isEqualTo("query number($in: Float) { number(in: $in) }");
then(number).isEqualTo(123.456D);
}
@@ -497,16 +502,16 @@ class ScalarBehavior {
fixture.returnsData("'number':123.456");
PrimitiveDoubleApi api = fixture.builder().build(PrimitiveDoubleApi.class);
- double number = api.number();
+ double number = api.number(4.5);
- then(fixture.query()).isEqualTo("query number { number }");
+ then(fixture.query()).isEqualTo("query number($in: Float!) { number(in: $in) }");
then(number).isEqualTo(123.456D);
}
}
@GraphQlClientApi
interface BigIntegerApi {
- BigInteger number();
+ BigInteger number(BigInteger in);
}
@Nested
@@ -517,9 +522,9 @@ class ScalarBehavior {
fixture.returnsData("'number':" + reallyLongInteger);
BigIntegerApi api = fixture.builder().build(BigIntegerApi.class);
- BigInteger number = api.number();
+ BigInteger number = api.number(BigInteger.TEN);
- then(fixture.query()).isEqualTo("query number { number }");
+ then(fixture.query()).isEqualTo("query number($in: BigInteger) { number(in: $in) }");
then(number).isEqualTo(reallyLongInteger);
}
@@ -529,16 +534,16 @@ class ScalarBehavior {
fixture.returnsData("'number':" + notSoLongInteger);
BigIntegerApi api = fixture.builder().build(BigIntegerApi.class);
- BigInteger number = api.number();
+ BigInteger number = api.number(BigInteger.TEN);
- then(fixture.query()).isEqualTo("query number { number }");
+ then(fixture.query()).isEqualTo("query number($in: BigInteger) { number(in: $in) }");
then(number).isEqualTo(notSoLongInteger);
}
}
@GraphQlClientApi
interface BigDecimalApi {
- BigDecimal number();
+ BigDecimal number(BigDecimal in);
}
@Nested
@@ -549,9 +554,9 @@ class ScalarBehavior {
fixture.returnsData("'number':" + reallyLongDecimal);
BigDecimalApi api = fixture.builder().build(BigDecimalApi.class);
- BigDecimal number = api.number();
+ BigDecimal number = api.number(BigDecimal.valueOf(12.34));
- then(fixture.query()).isEqualTo("query number { number }");
+ then(fixture.query()).isEqualTo("query number($in: BigDecimal) { number(in: $in) }");
then(number).isEqualTo(reallyLongDecimal);
}
@@ -561,16 +566,29 @@ class ScalarBehavior {
fixture.returnsData("'number':" + notSoLongDecimal);
BigDecimalApi api = fixture.builder().build(BigDecimalApi.class);
- BigDecimal number = api.number();
+ BigDecimal number = api.number(BigDecimal.valueOf(12.34));
- then(fixture.query()).isEqualTo("query number { number }");
+ then(fixture.query()).isEqualTo("query number($in: BigDecimal) { number(in: $in) }");
then(number).isEqualTo(notSoLongDecimal);
}
}
@GraphQlClientApi
interface StringApi {
- String greeting();
+ String greeting(String in);
+ }
+
+ @GraphQlClientApi
+ interface IdApi {
+ @Id
+ String idea(@Id String in);
+ }
+
+ @GraphQlClientApi
+ interface NonNullIdApi {
+ @NonNull
+ @Id
+ String idea(@NonNull @Id String in);
}
@GraphQlClientApi
@@ -677,12 +695,34 @@ class ScalarBehavior {
fixture.returnsData("'greeting':'dummy-greeting'");
StringApi api = fixture.builder().build(StringApi.class);
- String greeting = api.greeting();
+ String greeting = api.greeting("in");
- then(fixture.query()).isEqualTo("query greeting { greeting }");
+ then(fixture.query()).isEqualTo("query greeting($in: String) { greeting(in: $in) }");
then(greeting).isEqualTo("dummy-greeting");
}
+ @Test
+ void shouldCallIdQuery() {
+ fixture.returnsData("'idea':'out'");
+ IdApi api = fixture.builder().build(IdApi.class);
+
+ String out = api.idea("in");
+
+ then(fixture.query()).isEqualTo("query idea($in: ID) { idea(in: $in) }");
+ then(out).isEqualTo("out");
+ }
+
+ @Test
+ void shouldCallNonNullIdQuery() {
+ fixture.returnsData("'idea':'out'");
+ NonNullIdApi api = fixture.builder().build(NonNullIdApi.class);
+
+ String out = api.idea("in");
+
+ then(fixture.query()).isEqualTo("query idea($in: ID!) { idea(in: $in) }");
+ then(out).isEqualTo("out");
+ }
+
@Test
void shouldCallScalarWithValueOfQuery() {
fixture.returnsData("'foo':123456");
@@ -763,59 +803,77 @@ class ScalarBehavior {
String getting();
}
- @Test
- void shouldCallStringGetterQuery() {
- fixture.returnsData("'greeting':'foo'");
- StringGettersApi api = fixture.builder().build(StringGettersApi.class);
+ @Nested
+ class GetterBehavior {
+ @Test
+ void shouldCallStringGetterQuery() {
+ fixture.returnsData("'greeting':'foo'");
+ StringGettersApi api = fixture.builder().build(StringGettersApi.class);
- String value = api.getGreeting();
+ String value = api.getGreeting();
- then(fixture.query()).isEqualTo("query greeting { greeting }");
- then(value).isEqualTo("foo");
- }
+ then(fixture.query()).isEqualTo("query greeting { greeting }");
+ then(value).isEqualTo("foo");
+ }
- @Test
- void shouldCallJustGetQuery() {
- fixture.returnsData("'get':'foo'");
- StringGettersApi api = fixture.builder().build(StringGettersApi.class);
+ @Test
+ void shouldCallJustGetQuery() {
+ fixture.returnsData("'get':'foo'");
+ StringGettersApi api = fixture.builder().build(StringGettersApi.class);
- String value = api.get();
+ String value = api.get();
- then(fixture.query()).isEqualTo("query get { get }");
- then(value).isEqualTo("foo");
- }
+ then(fixture.query()).isEqualTo("query get { get }");
+ then(value).isEqualTo("foo");
+ }
- @Test
- void shouldCallOneCharGetterQuery() {
- fixture.returnsData("'g':'foo'");
- StringGettersApi api = fixture.builder().build(StringGettersApi.class);
+ @Test
+ void shouldCallOneCharGetterQuery() {
+ fixture.returnsData("'g':'foo'");
+ StringGettersApi api = fixture.builder().build(StringGettersApi.class);
- String value = api.getG();
+ String value = api.getG();
- then(fixture.query()).isEqualTo("query g { g }");
- then(value).isEqualTo("foo");
- }
+ then(fixture.query()).isEqualTo("query g { g }");
+ then(value).isEqualTo("foo");
+ }
- @Test
- void shouldCallGetAndOneLowerCharQuery() {
- fixture.returnsData("'gets':'foo'");
- StringGettersApi api = fixture.builder().build(StringGettersApi.class);
+ @Test
+ void shouldCallGetAndOneLowerCharQuery() {
+ fixture.returnsData("'gets':'foo'");
+ StringGettersApi api = fixture.builder().build(StringGettersApi.class);
+
+ String value = api.gets();
+
+ then(fixture.query()).isEqualTo("query gets { gets }");
+ then(value).isEqualTo("foo");
+ }
+
+ @Test
+ void shouldCallGetAndLowerCharsQuery() {
+ fixture.returnsData("'getting':'foo'");
+ StringGettersApi api = fixture.builder().build(StringGettersApi.class);
- String value = api.gets();
+ String value = api.getting();
- then(fixture.query()).isEqualTo("query gets { gets }");
- then(value).isEqualTo("foo");
+ then(fixture.query()).isEqualTo("query getting { getting }");
+ then(value).isEqualTo("foo");
+ }
}
- @Test
- void shouldCallGetAndLowerCharsQuery() {
- fixture.returnsData("'getting':'foo'");
- StringGettersApi api = fixture.builder().build(StringGettersApi.class);
+ @GraphQlClientApi
+ interface LocalDateApi {
+ LocalDate foo(LocalDate date);
+ }
- String value = api.getting();
+ @GraphQlClientApi
+ interface LocalTimeApi {
+ LocalTime foo(LocalTime date);
+ }
- then(fixture.query()).isEqualTo("query getting { getting }");
- then(value).isEqualTo("foo");
+ @GraphQlClientApi
+ interface OffsetTimeApi {
+ OffsetTime foo(OffsetTime date);
}
@GraphQlClientApi
@@ -823,18 +881,9 @@ class ScalarBehavior {
LocalDateTime foo(LocalDateTime date);
}
- @Test
- void shouldCallLocalDateTimeQuery() {
- LocalDateTime in = LocalDateTime.ofEpochSecond(123456789, 987654321, UTC);
- LocalDateTime out = LocalDateTime.ofEpochSecond(987654321, 123456789, UTC);
- fixture.returnsData("'foo':'" + out + "'");
- LocalDateTimeApi api = fixture.builder().build(LocalDateTimeApi.class);
-
- LocalDateTime value = api.foo(in);
-
- then(fixture.query()).isEqualTo("query foo($date: LocalDateTime) { foo(date: $date) }");
- then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
- then(value).isEqualTo(out);
+ @GraphQlClientApi
+ interface OffsetDateTimeApi {
+ OffsetDateTime foo(OffsetDateTime date);
}
@GraphQlClientApi
@@ -842,57 +891,128 @@ class ScalarBehavior {
ZonedDateTime foo(ZonedDateTime date);
}
- @Test
- void shouldCallZonedDateTimeQuery() {
- ZonedDateTime in = ZonedDateTime.of(2020, 10, 9, 15, 58, 21, 0, ZoneOffset.ofHours(2));
- ZonedDateTime out = ZonedDateTime.of(2018, 1, 9, 1, 2, 3, 4, ZoneOffset.ofHours(-2));
- fixture.returnsData("'foo':'" + out + "'");
- ZonedDateTimeApi api = fixture.builder().build(ZonedDateTimeApi.class);
-
- ZonedDateTime value = api.foo(in);
-
- then(fixture.query()).isEqualTo("query foo($date: ZonedDateTime) { foo(date: $date) }");
- then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
- then(value).isEqualTo(out);
- }
-
@GraphQlClientApi
interface InstantApi {
Instant foo(Instant instant);
}
- @Test
- void shouldCallInstantQuery() {
- Instant in = Instant.ofEpochMilli(123456789);
- Instant out = Instant.ofEpochMilli(987654321);
- fixture.returnsData("'foo':'" + out + "'");
- InstantApi api = fixture.builder().build(InstantApi.class);
-
- Instant value = api.foo(in);
-
- then(fixture.query()).isEqualTo("query foo($instant: Instant) { foo(instant: $instant) }");
- then(fixture.variables()).isEqualTo("{'instant':'" + in + "'}");
- then(value).isEqualTo(out);
- }
-
@GraphQlClientApi
interface DateApi {
Date foo(Date date);
}
- @Test
- void shouldCallDateQuery() {
- Instant in = Instant.ofEpochMilli(123456789);
- Instant out = Instant.ofEpochMilli(987654321);
- fixture.returnsData("'foo':'" + out + "'");
- DateApi api = fixture.builder().build(DateApi.class);
+ @Nested
+ class DateAndTimeBehavior {
+ @Test
+ void shouldCallLocalDateQuery() {
+ LocalDate in = LocalDate.of(2020, 10, 31);
+ LocalDate out = LocalDate.of(3000, 1, 1);
+ fixture.returnsData("'foo':'" + out + "'");
+ LocalDateApi api = fixture.builder().build(LocalDateApi.class);
- Date value = api.foo(Date.from(in));
+ LocalDate value = api.foo(in);
- then(fixture.query()).isEqualTo("query foo($date: Date) { foo(date: $date) }");
- then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
- then(value).isEqualTo(Date.from(out));
- }
+ then(fixture.query()).isEqualTo("query foo($date: Date) { foo(date: $date) }");
+ then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
+ then(value).isEqualTo(out);
+ }
+
+ @Test
+ void shouldCallLocalTimeQuery() {
+ LocalTime in = LocalTime.of(12, 34, 56);
+ LocalTime out = LocalTime.of(21, 43, 55);
+ fixture.returnsData("'foo':'" + out + "'");
+ LocalTimeApi api = fixture.builder().build(LocalTimeApi.class);
+
+ LocalTime value = api.foo(in);
+
+ then(fixture.query()).isEqualTo("query foo($date: Time) { foo(date: $date) }");
+ then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
+ then(value).isEqualTo(out);
+ }
+
+ @Test
+ void shouldCallOffsetTimeQuery() {
+ OffsetTime in = OffsetTime.of(12, 34, 56, 789, ZoneOffset.ofHours(3));
+ OffsetTime out = OffsetTime.of(21, 43, 55, 987, ZoneOffset.ofHours(5));
+ fixture.returnsData("'foo':'" + out + "'");
+ OffsetTimeApi api = fixture.builder().build(OffsetTimeApi.class);
+
+ OffsetTime value = api.foo(in);
+
+ then(fixture.query()).isEqualTo("query foo($date: Time) { foo(date: $date) }");
+ then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
+ then(value).isEqualTo(out);
+ }
+
+ @Test
+ void shouldCallLocalDateTimeQuery() {
+ LocalDateTime in = LocalDateTime.of(2020, 10, 9, 15, 58, 21, 0);
+ LocalDateTime out = LocalDateTime.of(2018, 1, 9, 1, 2, 3, 4);
+ fixture.returnsData("'foo':'" + out + "'");
+ LocalDateTimeApi api = fixture.builder().build(LocalDateTimeApi.class);
+
+ LocalDateTime value = api.foo(in);
- // there's only special code for Date; java.time works out of the box, so we dont' test them all
+ then(fixture.query()).isEqualTo("query foo($date: DateTime) { foo(date: $date) }");
+ then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
+ then(value).isEqualTo(out);
+ }
+
+ @Test
+ void shouldCallOffsetDateTimeQuery() {
+ OffsetDateTime in = OffsetDateTime.of(2020, 10, 9, 15, 58, 21, 0, ZoneOffset.ofHours(2));
+ OffsetDateTime out = OffsetDateTime.of(2018, 1, 9, 1, 2, 3, 4, ZoneOffset.ofHours(-2));
+ fixture.returnsData("'foo':'" + out + "'");
+ OffsetDateTimeApi api = fixture.builder().build(OffsetDateTimeApi.class);
+
+ OffsetDateTime value = api.foo(in);
+
+ then(fixture.query()).isEqualTo("query foo($date: DateTime) { foo(date: $date) }");
+ then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
+ then(value).isEqualTo(out);
+ }
+
+ @Test
+ void shouldCallZonedDateTimeQuery() {
+ ZonedDateTime in = ZonedDateTime.of(2020, 10, 31, 12, 34, 56, 789, ZoneOffset.ofHours(3));
+ ZonedDateTime out = ZonedDateTime.of(3000, 1, 13, 21, 43, 55, 987, UTC);
+ fixture.returnsData("'foo':'" + out + "'");
+ ZonedDateTimeApi api = fixture.builder().build(ZonedDateTimeApi.class);
+
+ ZonedDateTime value = api.foo(in);
+
+ then(fixture.query()).isEqualTo("query foo($date: DateTime) { foo(date: $date) }");
+ then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
+ then(value).isEqualTo(out);
+ }
+
+ @Test
+ void shouldCallInstantQuery() {
+ Instant in = Instant.ofEpochMilli(123456789);
+ Instant out = Instant.ofEpochMilli(987654321);
+ fixture.returnsData("'foo':'" + out + "'");
+ InstantApi api = fixture.builder().build(InstantApi.class);
+
+ Instant value = api.foo(in);
+
+ then(fixture.query()).isEqualTo("query foo($instant: Instant) { foo(instant: $instant) }");
+ then(fixture.variables()).isEqualTo("{'instant':'" + in + "'}");
+ then(value).isEqualTo(out);
+ }
+
+ @Test
+ void shouldCallDateQuery() {
+ Instant in = Instant.ofEpochMilli(123456789);
+ Instant out = Instant.ofEpochMilli(987654321);
+ fixture.returnsData("'foo':'" + out + "'");
+ DateApi api = fixture.builder().build(DateApi.class);
+
+ Date value = api.foo(Date.from(in));
+
+ then(fixture.query()).isEqualTo("query foo($date: Date) { foo(date: $date) }");
+ then(fixture.variables()).isEqualTo("{'date':'" + in + "'}");
+ then(value).isEqualTo(Date.from(out));
+ }
+ }
} | ['client/implementation/src/test/java/test/unit/ScalarBehavior.java', 'client/implementation/src/test/java/test/unit/ParametersBehavior.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 573,347 | 112,600 | 16,407 | 196 | 1,357 | 242 | 41 | 1 | 409 | 44 | 103 | 15 | 1 | 1 | 1970-01-01T00:26:44 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,087 | smallrye/smallrye-graphql/950/946 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/946 | https://github.com/smallrye/smallrye-graphql/pull/950 | https://github.com/smallrye/smallrye-graphql/pull/950 | 1 | fix | Typesafe Client: JsonException when a GraphQL error has a path that is actually `null` | ```json
{
"errors": [
{
"message": "some error message",
"locations": [{"line": 1, "column": 256}],
"path": ["foo", "bar", "baz"],
}
],
"data": {
"foo": [
{
"bar": null,
}
]
}
}
```
There was an error in `foo/bar/baz` that resulted in `foo/bar` being `null`.
⇒ The typesafe client fails with `javax.json.JsonException: The reference value in a JSON Pointer must be a JSON Object or a JSON Array, was 'NULL'` in `ResultBuilder` line 74 when trying to apply an `ErrorOr`.
Expected: don't apply the `ErrorOr` | ff1a3e5c74a216751dda31c60b2789cfab5be89f | 821067e6127dfaaf63cc8520f45f0d0b94c0cdb6 | https://github.com/smallrye/smallrye-graphql/compare/ff1a3e5c74a216751dda31c60b2789cfab5be89f...821067e6127dfaaf63cc8520f45f0d0b94c0cdb6 | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java
index c4e14be5..dbf0cbbf 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java
@@ -13,6 +13,7 @@ import java.util.stream.Collectors;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
+import javax.json.JsonException;
import javax.json.JsonObject;
import javax.json.JsonPatch;
import javax.json.JsonPointer;
@@ -72,6 +73,8 @@ public class ResultBuilder {
if (data == null || path == null)
return false;
JsonPointer pointer = Json.createPointer(path.stream().map(Object::toString).collect(joining("/", "/", "")));
+ if (!exists(pointer))
+ return false;
JsonArrayBuilder errors = Json.createArrayBuilder();
if (pointer.containsValue(data) && isListOf(pointer.getValue(data), ErrorOr.class.getSimpleName()))
pointer.getValue(data).asJsonArray().forEach(errors::add);
@@ -80,6 +83,15 @@ public class ResultBuilder {
return true;
}
+ private boolean exists(JsonPointer pointer) {
+ try {
+ pointer.containsValue(data);
+ return true;
+ } catch (JsonException e) {
+ return false;
+ }
+ }
+
private GraphQLClientError convert(JsonValue jsonValue) {
JsonObject jsonObject = jsonValue.asJsonObject();
return new GraphQLClientError() {
diff --git a/client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java b/client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java
index 1660660a..beea0c32 100644
--- a/client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java
+++ b/client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java
@@ -458,6 +458,34 @@ class ErrorBehavior {
then(error.getErrorCode()).isEqualTo("team-search-disabled");
}
+ @Test
+ void shouldFetchErrorOrWithNullInPath() {
+ fixture.returns("{" +
+ "\\"data\\":{\\"teams\\":null}," +
+ "\\"errors\\":[{" +
+ /**/"\\"message\\":\\"can't get team name\\"," +
+ /**/"\\"locations\\":[{\\"line\\":1,\\"column\\":2,\\"sourceName\\":\\"loc\\"}]," +
+ /**/"\\"path\\": [\\"teams\\",\\"name\\"],\\n" +
+ /**/"\\"extensions\\":{" +
+ /**//**/"\\"code\\":\\"team-name-disabled\\"}" +
+ "}]}}");
+ SuperHeroApi api = fixture.build(SuperHeroApi.class);
+
+ GraphQLClientException throwable = catchThrowableOfType(api::teams, GraphQLClientException.class);
+
+ then(fixture.query()).isEqualTo("query teams { teams {name} }");
+ then(throwable).hasMessage("errors from service");
+ then(throwable).hasToString("GraphQlClientException: errors from service\\n" +
+ "errors:\\n" +
+ "- team-name-disabled: [teams, name] can't get team name [(1:2@loc)] {code=team-name-disabled})");
+ then(throwable.getErrors()).hasSize(1);
+ GraphQLClientError error = throwable.getErrors().get(0);
+ then(error.getMessage()).isEqualTo("can't get team name");
+ then(error.getPath()).containsExactly("teams", "name");
+ then(error.getLocations()).containsExactly(new SourceLocation(1, 2, "loc"));
+ then(error.getErrorCode()).isEqualTo("team-name-disabled");
+ }
+
static class Wrapper {
@Name("findHeroes")
List<ErrorOr<SuperHero>> superHeroes; | ['client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,100,440 | 216,642 | 31,389 | 341 | 1,777 | 382 | 40 | 2 | 593 | 84 | 169 | 24 | 0 | 1 | 1970-01-01T00:27:08 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,088 | smallrye/smallrye-graphql/907/906 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/906 | https://github.com/smallrye/smallrye-graphql/pull/907 | https://github.com/smallrye/smallrye-graphql/pull/907 | 1 | fix | Handle errors with a `null` error `message` | The typesafe client throws a `java.lang.ClassCastException: class javax.json.JsonValueImpl cannot be cast to class javax.json.JsonString` if the response is something like:
```json
{
"errors": [
{
"message": null,
"locations": [
...
```
Reason: the `ResultBuilder` simply returns `jsonObject.getString("message");` which fails on a `JsonValue.NULL`. | bac875fe4eba77afa60d25013c47e100475d2c6e | 43ea1fff9f297b2e8d1c05f6a34032fab892163d | https://github.com/smallrye/smallrye-graphql/compare/bac875fe4eba77afa60d25013c47e100475d2c6e...43ea1fff9f297b2e8d1c05f6a34032fab892163d | diff --git a/client/api/src/main/java/io/smallrye/graphql/client/typesafe/api/GraphQLClientError.java b/client/api/src/main/java/io/smallrye/graphql/client/typesafe/api/GraphQLClientError.java
index 953adec1..06c00e48 100644
--- a/client/api/src/main/java/io/smallrye/graphql/client/typesafe/api/GraphQLClientError.java
+++ b/client/api/src/main/java/io/smallrye/graphql/client/typesafe/api/GraphQLClientError.java
@@ -27,12 +27,13 @@ public interface GraphQLClientError {
default String defaultToString() {
String errorCode = getErrorCode();
List<Object> path = getPath();
+ String message = getMessage();
List<SourceLocation> locations = getLocations();
Map<String, Object> extensions = getExtensions();
- return ((errorCode == null) ? "" : errorCode + ": ")
+ return (((errorCode == null) ? "" : errorCode + ": ")
+ ((path == null) ? "" : path + " ")
- + getMessage()
- + ((locations == null) ? "" : " " + locations)
- + ((extensions == null || extensions.isEmpty()) ? "" : " " + extensions);
+ + ((message == null) ? "" : message + " ")
+ + ((locations == null) ? "" : locations + " ")
+ + ((extensions == null || extensions.isEmpty()) ? "" : extensions + " ")).trim();
}
}
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java
index 8235ee8f..d3a70f41 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java
@@ -85,7 +85,7 @@ public class ResultBuilder {
return new GraphQLClientError() {
@Override
public String getMessage() {
- return jsonObject.getString("message");
+ return jsonObject.getString("message", null);
}
@Override
diff --git a/client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java b/client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java
index df33d2a2..1660660a 100644
--- a/client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java
+++ b/client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java
@@ -107,6 +107,68 @@ class ErrorBehavior {
then(error.getExtensions().get("classification")).isEqualTo("ValidationError");
}
+ @Test
+ void shouldFailOnErrorWithoutMessage() {
+ fixture.returns("{" +
+ "\\"data\\":{\\"greeting\\":null}," +
+ "\\"errors\\":[{" +
+ /**/"\\"locations\\":[{\\"line\\":1,\\"column\\":2,\\"sourceName\\":\\"loc\\"}]," +
+ /**/"\\"path\\": [\\"greeting\\"],\\n" +
+ /**/"\\"extensions\\":{" +
+ /**//**/"\\"description\\":\\"some description\\"," +
+ /**//**/"\\"queryPath\\":[\\"greeting\\"]," +
+ /**//**/"\\"classification\\":\\"DataFetchingException\\"," +
+ /**//**/"\\"code\\":\\"no-greeting\\"}" +
+ "}]}}");
+ StringApi api = fixture.build(StringApi.class);
+
+ GraphQLClientException thrown = catchThrowableOfType(api::greeting, GraphQLClientException.class);
+
+ then(thrown).hasMessage("errors from service (and we can't apply them to a java.lang.String value for" +
+ " tck.graphql.typesafe.ErrorBehavior$StringApi#greeting; see ErrorOr)");
+ then(thrown).hasToString(
+ "GraphQlClientException: errors from service (and we can't apply them to a java.lang.String value for "
+ + StringApi.class.getName() + "#greeting; see ErrorOr)\\n" +
+ "errors:\\n" +
+ "- no-greeting: [greeting] [(1:2@loc)]" +
+ " {description=some description, queryPath=[greeting], classification=DataFetchingException, code=no-greeting})");
+ then(thrown.getErrors()).hasSize(1);
+ GraphQLClientError error = thrown.getErrors().get(0);
+ then(error.getMessage()).isNull();
+ then(error.getLocations()).containsExactly(new SourceLocation(1, 2, "loc"));
+ then(error.getPath()).containsExactly("greeting");
+ then(error.getErrorCode()).isEqualTo("no-greeting");
+ }
+
+ @Test
+ void shouldFailOnErrorWithNullMessage() {
+ fixture.returns("{\\n" +
+ " \\"errors\\": [\\n" +
+ " {\\n" +
+ " \\"message\\": null,\\n" +
+ " \\"extensions\\": {\\n" +
+ " \\"classification\\": \\"SomeClassification\\"\\n" +
+ " }\\n" +
+ " }\\n" +
+ " ],\\n" +
+ " \\"data\\": null\\n" +
+ "}\\n");
+ StringApi api = fixture.build(StringApi.class);
+
+ GraphQLClientException thrown = catchThrowableOfType(api::greeting, GraphQLClientException.class);
+
+ then(thrown).hasMessage("errors from service");
+ then(thrown).hasToString("GraphQlClientException: errors from service\\n" +
+ "errors:\\n" +
+ "- {classification=SomeClassification})");
+ then(thrown.getErrors()).hasSize(1);
+ GraphQLClientError error = thrown.getErrors().get(0);
+ then(error.getMessage()).isNull();
+ then(error.getLocations()).isNull();
+ then(error.getErrorCode()).isNull();
+ then(error.getExtensions().get("classification")).isEqualTo("SomeClassification");
+ }
+
@Test
void shouldFailOnErrorWithoutExtensions() {
fixture.returns("{\\n" + | ['client/tck/src/main/java/tck/graphql/typesafe/ErrorBehavior.java', 'client/api/src/main/java/io/smallrye/graphql/client/typesafe/api/GraphQLClientError.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/ResultBuilder.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 1,069,456 | 210,808 | 30,568 | 325 | 3,792 | 795 | 73 | 3 | 383 | 42 | 86 | 12 | 0 | 1 | 1970-01-01T00:27:05 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,097 | smallrye/smallrye-graphql/340/337 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/337 | https://github.com/smallrye/smallrye-graphql/pull/340 | https://github.com/smallrye/smallrye-graphql/pull/340 | 1 | fix | Support java.util.Date in queries from smallrye-graphql-client | I am using this library to fetch data from GitHub and I noticed that some types are not assembled correctly. This is what I have so far:
```java
@GraphQlClientApi(endpoint = "https://api.github.com/graphql")
@AuthorizationHeader(confPrefix = "github", type = AuthorizationHeader.Type.BEARER)
public interface GitHub {
@Query("repository")
Repository repository(String owner, String name);
}
```
This is the `Repository` class:
```java
public class Repository {
public String id;
public java.util.Date createdAt;
}
```
This is my test:
```java
@Test
void test() {
final GitHub github = GraphQlClientBuilder.newBuilder().build(GitHub.class);
System.out.println(github.repository("gastaldi", "backport-repo"));
}
```
And this is the error I get:
```java
io.smallrye.graphql.client.typesafe.api.GraphQlClientException: errors from service: [{"message":"Parse error on \\"}\\" (RCURLY) at [1, 77]","locations":[{"line":1,"column":77}]}]:
{"query":"query { repository(owner: \\"gastaldi\\", name: \\"backport-repo\\") {id createdAt {}} }"}
at io.smallrye.graphql.client.typesafe.impl.GraphQlClientProxy.readResponse(GraphQlClientProxy.java:128)
at io.smallrye.graphql.client.typesafe.impl.GraphQlClientProxy.fromJson(GraphQlClientProxy.java:120)
at io.smallrye.graphql.client.typesafe.impl.GraphQlClientProxy.invoke(GraphQlClientProxy.java:52)
at io.smallrye.graphql.client.typesafe.impl.GraphQlClientBuilderImpl.lambda$build$0(GraphQlClientBuilderImpl.java:45)
at com.sun.proxy.$Proxy18.repository(Unknown Source)
at io.quarkus.backports.GraphQLTest.test(GraphQLTest.java:12)
```
| af0555254f21ab09bad79a37d2091087c4cac3dd | c1ca136b97d818dc91e42894ca9381437f974c43 | https://github.com/smallrye/smallrye-graphql/compare/af0555254f21ab09bad79a37d2091087c4cac3dd...c1ca136b97d818dc91e42894ca9381437f974c43 | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java
index a4f150bf..41f30933 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java
@@ -48,10 +48,9 @@ public class GraphQlClientBuilderImpl implements GraphQlClientBuilder {
}
private ClassLoader getClassLoader(Class<?> apiClass) {
- if (System.getSecurityManager() == null) {
+ if (System.getSecurityManager() == null)
return apiClass.getClassLoader();
- }
- return AccessController.doPrivileged((PrivilegedAction<ClassLoader>) () -> apiClass.getClassLoader());
+ return AccessController.doPrivileged((PrivilegedAction<ClassLoader>) apiClass::getClassLoader);
}
private void readConfig(GraphQlClientApi annotation) {
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
index bd200d63..93fd8578 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
@@ -83,15 +83,13 @@ class GraphQlClientProxy {
while (type.isOptional())
type = type.getItemType();
- if (type.isScalar()) {
+ if (type.isScalar())
return "";
- } else if (type.isCollection()) {
+ if (type.isCollection())
return fields(type.getItemType());
- } else {
- return type.fields()
- .map(this::field)
- .collect(joining(" ", " {", "}"));
- }
+ return type.fields()
+ .map(this::field)
+ .collect(joining(" ", " {", "}"));
}
private String field(FieldInfo field) {
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
index be3be778..3b6b5efb 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
@@ -42,23 +42,27 @@ class RequestBuilder {
private void buildParam(TypeInfo type, Object value) {
if (value == null)
request.append("null");
- else if (value instanceof Boolean || value instanceof Number || type.isEnum())
- request.append(value);
else if (type.isScalar())
- buildScalarParam(value);
+ buildScalarParam(type, value);
else if (type.isCollection())
buildArrayParam(type.getItemType(), (List<?>) value);
else
buildObjectParam(type, value);
}
- private void buildScalarParam(Object value) {
- request
- .append("\\"")
- .append(value.toString()
- .replace("\\"", "\\\\\\"")
- .replace("\\n", "\\\\n"))
- .append("\\"");
+ private void buildScalarParam(TypeInfo type, Object value) {
+ boolean quoted = !unquoted(type);
+ if (quoted)
+ request.append("\\"");
+ request.append(type.stringValue(value));
+ if (quoted)
+ request.append("\\"");
+ }
+
+ public boolean unquoted(TypeInfo type) {
+ return type.isPrimitive()
+ || Number.class.isAssignableFrom(type.getRawType())
+ || type.isEnum();
}
private void buildArrayParam(TypeInfo itemType, List<?> values) {
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java
index 7d90e80a..66f12fed 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java
@@ -1,5 +1,7 @@
package io.smallrye.graphql.client.typesafe.impl.json;
+import java.time.Instant;
+
import javax.json.JsonString;
import io.smallrye.graphql.client.typesafe.api.GraphQlClientException;
@@ -24,6 +26,9 @@ class JsonStringReader extends Reader<JsonString> {
//noinspection rawtypes,unchecked
return Enum.valueOf((Class) type.getRawType(), value.getString());
+ if (java.util.Date.class.equals(this.type.getRawType()))
+ return java.util.Date.from(Instant.parse(value.getString()));
+
ConstructionInfo constructor = type.scalarConstructor()
.orElseThrow(() -> new GraphQlClientValueException(location, value));
try {
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
index bb0a1cd7..b9cc7915 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
@@ -4,7 +4,6 @@ import static java.lang.reflect.Modifier.isStatic;
import static java.lang.reflect.Modifier.isTransient;
import static java.util.Objects.requireNonNull;
-import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedType;
import java.lang.reflect.Constructor;
import java.lang.reflect.Executable;
@@ -111,10 +110,9 @@ public class TypeInfo {
}
private Field[] getDeclaredFields(Class<?> type) {
- if (System.getSecurityManager() == null) {
+ if (System.getSecurityManager() == null)
return type.getDeclaredFields();
- }
- return AccessController.doPrivileged((PrivilegedAction<Field[]>) () -> type.getDeclaredFields());
+ return AccessController.doPrivileged((PrivilegedAction<Field[]>) type::getDeclaredFields);
}
private boolean isGraphQlField(Field field) {
@@ -127,10 +125,11 @@ public class TypeInfo {
public boolean isScalar() {
return isPrimitive()
- || Character.class.equals(getRawType()) // has a valueOf(char), not valueOf(String)
|| Number.class.isAssignableFrom(getRawType())
- || CharSequence.class.isAssignableFrom(getRawType())
|| isEnum()
+ || CharSequence.class.isAssignableFrom(getRawType())
+ || Character.class.equals(getRawType()) // has a valueOf(char), not valueOf(String)
+ || java.util.Date.class.equals(getRawType())
|| scalarConstructor().isPresent();
}
@@ -143,15 +142,12 @@ public class TypeInfo {
}
public Optional<ConstructionInfo> scalarConstructor() {
- return Stream.of(getRawType().getMethods()).filter(this::isStaticStringConstructor)
+ return Stream.of(getRawType().getMethods())
+ .filter(this::isStaticStringConstructor)
.findFirst()
.map(ConstructionInfo::new);
}
- private boolean hasOneStringParameter(Executable executable) {
- return executable.getParameterCount() == 1 && CharSequence.class.isAssignableFrom(executable.getParameterTypes()[0]);
- }
-
private boolean isStaticStringConstructor(Method method) {
return isStaticConstructorMethodNamed(method, "of")
|| isStaticConstructorMethodNamed(method, "valueOf")
@@ -165,6 +161,18 @@ public class TypeInfo {
&& hasOneStringParameter(method);
}
+ private boolean hasOneStringParameter(Executable executable) {
+ return executable.getParameterCount() == 1 && CharSequence.class.isAssignableFrom(executable.getParameterTypes()[0]);
+ }
+
+ public String stringValue(Object value) {
+ if (java.util.Date.class.equals(getRawType()))
+ return ((java.util.Date) value).toInstant().toString();
+ return value.toString()
+ .replace("\\"", "\\\\\\"")
+ .replace("\\n", "\\\\n");
+ }
+
public Object newInstance() {
try {
Constructor<?> noArgsConstructor = getDeclaredConstructor(getRawType());
@@ -180,8 +188,7 @@ public class TypeInfo {
return type.getDeclaredConstructor();
}
try {
- return AccessController
- .doPrivileged((PrivilegedExceptionAction<Constructor<?>>) () -> type.getDeclaredConstructor());
+ return AccessController.doPrivileged((PrivilegedExceptionAction<Constructor<?>>) type::getDeclaredConstructor);
} catch (PrivilegedActionException pae) {
if (pae.getCause() instanceof NoSuchMethodException) {
throw (NoSuchMethodException) pae.getCause();
@@ -229,28 +236,22 @@ public class TypeInfo {
throw new GraphQlClientException("unsupported reflection type " + type.getClass());
}
- public <A extends Annotation> Stream<A> getAnnotations(Class<A> type) {
- return Stream.of(((Class<?>) this.type).getAnnotationsByType(type));
+ public Optional<MethodInfo> getMethod(String name, Class<?>... args) {
+ return getDeclaredMethod((Class<?>) this.type, name, args)
+ .map(MethodInfo::of);
}
- public Optional<MethodInfo> getMethod(String name) {
+ private Optional<Method> getDeclaredMethod(Class<?> type, String name, Class<?>... args) {
try {
- return Optional.of(MethodInfo.of(getDeclaredMethod((Class<?>) this.type, name)));
+ if (System.getSecurityManager() == null)
+ return Optional.of(type.getDeclaredMethod(name, args));
+ return Optional.of(AccessController
+ .doPrivileged((PrivilegedExceptionAction<Method>) () -> type.getDeclaredMethod(name, args)));
} catch (NoSuchMethodException e) {
return Optional.empty();
- }
- }
-
- private Method getDeclaredMethod(Class<?> type, String name) throws NoSuchMethodException {
- if (System.getSecurityManager() == null) {
- return type.getDeclaredMethod(name);
- }
- try {
- return AccessController.doPrivileged((PrivilegedExceptionAction<Method>) () -> type.getDeclaredMethod(name));
} catch (PrivilegedActionException pae) {
- if (pae.getCause() instanceof NoSuchMethodException) {
- throw (NoSuchMethodException) pae.getCause();
- }
+ if (pae.getCause() instanceof NoSuchMethodException)
+ return Optional.empty();
throw new RuntimeException(pae.getCause());
}
}
diff --git a/client/implementation/src/test/java/test/unit/ScalarBehavior.java b/client/implementation/src/test/java/test/unit/ScalarBehavior.java
index 887adfe6..790ba3a9 100644
--- a/client/implementation/src/test/java/test/unit/ScalarBehavior.java
+++ b/client/implementation/src/test/java/test/unit/ScalarBehavior.java
@@ -6,7 +6,9 @@ import static org.assertj.core.api.BDDAssertions.then;
import java.math.BigDecimal;
import java.math.BigInteger;
+import java.time.Instant;
import java.time.LocalDate;
+import java.util.Date;
import javax.ws.rs.core.Response;
@@ -845,4 +847,22 @@ class ScalarBehavior {
then(fixture.query()).isEqualTo("getting");
then(value).isEqualTo("foo");
}
+
+ @GraphQlClientApi
+ interface DateApi {
+ Date foo(Date date);
+ }
+
+ @Test
+ void shouldCallDateQuery() {
+ Instant in = Instant.ofEpochMilli(123456789);
+ Instant out = Instant.ofEpochMilli(987654321);
+ fixture.returnsData("'foo':'" + out + "'");
+ DateApi api = fixture.builder().build(DateApi.class);
+
+ Date value = api.foo(Date.from(in));
+
+ then(fixture.query()).isEqualTo("foo(date: '" + in + "')");
+ then(value).isEqualTo(Date.from(out));
+ }
} | ['client/implementation/src/test/java/test/unit/ScalarBehavior.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java'] | {'.java': 6} | 6 | 6 | 0 | 0 | 6 | 499,807 | 97,843 | 14,420 | 178 | 5,301 | 1,003 | 105 | 5 | 1,686 | 129 | 415 | 48 | 1 | 4 | 1970-01-01T00:26:35 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,096 | smallrye/smallrye-graphql/388/387 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/387 | https://github.com/smallrye/smallrye-graphql/pull/388 | https://github.com/smallrye/smallrye-graphql/pull/388 | 1 | fix | NPE: some validation errors don't provide a query path | Some validation errors don't provide a query path, e.g. `LoneAnonymousOperation`. This results in a NPE in the call to `toJsonArray` in line 75 of the `ExecutionErrorsService`.
Would it be better to provide an empty path or null? | 8d00415f83912e5c5b222c90b298ec0e585ed8dd | c47f2ba2398ab9e8889b06d28d1c4d0f55cfc70f | https://github.com/smallrye/smallrye-graphql/compare/8d00415f83912e5c5b222c90b298ec0e585ed8dd...c47f2ba2398ab9e8889b06d28d1c4d0f55cfc70f | diff --git a/server/implementation/src/main/java/io/smallrye/graphql/execution/error/ExecutionErrorsService.java b/server/implementation/src/main/java/io/smallrye/graphql/execution/error/ExecutionErrorsService.java
index a0df1b7f..372a08cf 100644
--- a/server/implementation/src/main/java/io/smallrye/graphql/execution/error/ExecutionErrorsService.java
+++ b/server/implementation/src/main/java/io/smallrye/graphql/execution/error/ExecutionErrorsService.java
@@ -117,9 +117,11 @@ public class ExecutionErrorsService {
private JsonArray toJsonArray(List<?> list) {
JsonArrayBuilder arrayBuilder = jsonBuilderFactory.createArrayBuilder();
- for (Object o : list) {
- if (o != null)
- arrayBuilder.add(o.toString());
+ if (list != null && !list.isEmpty()) {
+ for (Object o : list) {
+ if (o != null)
+ arrayBuilder.add(o.toString());
+ }
}
return arrayBuilder.build();
} | ['server/implementation/src/main/java/io/smallrye/graphql/execution/error/ExecutionErrorsService.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 529,291 | 103,766 | 15,184 | 181 | 294 | 60 | 8 | 1 | 232 | 37 | 57 | 3 | 0 | 0 | 1970-01-01T00:26:38 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,089 | smallrye/smallrye-graphql/659/654 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/654 | https://github.com/smallrye/smallrye-graphql/pull/659 | https://github.com/smallrye/smallrye-graphql/pull/659 | 1 | fix | Issue with GraphQLClientApi Mutation | I have a simple GraphQL server and a Kotlin client as follows:
```
@ApplicationScoped
class PeopleService(
@ConfigProperty(name = "people.graphql.url")
private val peopleApiUrl: String
) {
var peopleApi: PeopleApi = GraphQlClientBuilder.newBuilder()
.endpoint(peopleApiUrl)
.build(PeopleApi::class.java)
fun getPerson(ssn: String): Person = peopleApi.person(ssn)
fun createPerson(person: PeopleApi.PersonData): PeopleApi.PersonData = peopleApi.createPerson(person)
}
@GraphQlClientApi
interface PeopleApi {
@Query
fun person(
@Name("id") id: String,
): Person
@Mutation(value = "createPerson")
fun createPerson(
@Name("personDetails")
personDetails: PersonData
): PersonData
@Input
data class PersonData(
var firstNames: String,
var lastName: String,
)
}
```
The server works when using the graphql-ui with the mutation:
```
mutation addKalle {
createPerson(personDetails: {
firstNames: "Kalervo",
lastName: "Jankko"
}) {
firstNames
lastName
}
}
```
But my Kotlin client does not send the same mutation. This is what comes to the server log:
```
2021-02-24 20:12:36,859 DEBUG [not.gra.GraphQL] (vert.x-worker-thread-15) Executing request. operation name: 'createPerson'. query: 'mutation createPerson($personDetails: ) { createPerson(personDetails: $personDetails) {firstNames lastName} }'. variables '{personDetails={lastName=Jankko, firstNames=Kalervo}}'
2021-02-24 20:12:36,859 DEBUG [not.gra.GraphQL] (vert.x-worker-thread-15) Parsing query: 'mutation createPerson($personDetails: ) { createPerson(personDetails: $personDetails) {firstNames lastName} }'...
2021-02-24 20:12:36,860 WARN [not.gra.GraphQL] (vert.x-worker-thread-15) Query failed to parse : 'mutation createPerson($personDetails: ) { createPerson(personDetails: $personDetails) {firstNames lastName} }'
```
Why the name createPerson is there twice?
When debugging the client I also noticed that in io.smallrye.graphql.client.typesafe.impl.QueryBuilder
```
private String declare(ParameterInfo parameter) {
return "$" + parameter.getName() + ": " + parameter.graphQlInputTypeName();
}
```
graphQlInputTypeName is an empty string..
The query of my Kotlin client works fine against the server. How to make the mutation work? | fba8ff73cef0f6e559cbed2f45f21be257a11f4a | 6e6400a41da883d6cb589e6895d44b9b4dedc0b7 | https://github.com/smallrye/smallrye-graphql/compare/fba8ff73cef0f6e559cbed2f45f21be257a11f4a...6e6400a41da883d6cb589e6895d44b9b4dedc0b7 | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
index 33c705a2..6c3715bc 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
@@ -44,8 +44,12 @@ public class ParameterInfo {
}
private String graphQlInputTypeName(TypeInfo type) {
- if (type.isAnnotated(Input.class))
- return type.getAnnotation(Input.class).value();
+ if (type.isAnnotated(Input.class)) {
+ String value = type.getAnnotation(Input.class).value();
+ if (!value.isEmpty()) {
+ return value;
+ }
+ }
if (type.isAnnotated(Name.class))
return type.getAnnotation(Name.class).value();
switch (type.getSimpleName()) {
diff --git a/client/implementation/src/test/java/test/unit/ParametersBehavior.java b/client/implementation/src/test/java/test/unit/ParametersBehavior.java
index 3384ead7..9d65da18 100644
--- a/client/implementation/src/test/java/test/unit/ParametersBehavior.java
+++ b/client/implementation/src/test/java/test/unit/ParametersBehavior.java
@@ -202,6 +202,39 @@ class ParametersBehavior {
then(greeting.count).isEqualTo(3);
}
+ @GraphQlClientApi
+ interface EmptyInputObjectParamApi {
+ EmptyInputGreeting say(EmptyInputGreeting greet);
+ }
+
+ @Input
+ private static class EmptyInputGreeting {
+ String text;
+ int count;
+
+ @SuppressWarnings("unused")
+ EmptyInputGreeting() {
+ }
+
+ EmptyInputGreeting(String text, int count) {
+ this.text = text;
+ this.count = count;
+ }
+ }
+
+ @Test
+ void shouldCallEmptyInputObjectParamQuery() {
+ fixture.returnsData("'say':{'text':'ho','count':3}");
+ EmptyInputObjectParamApi api = fixture.build(EmptyInputObjectParamApi.class);
+
+ EmptyInputGreeting greeting = api.say(new EmptyInputGreeting("hi", 5));
+
+ then(fixture.query()).isEqualTo("query say($greet: EmptyInputGreetingInput) { say(greet: $greet) {text count} }");
+ then(fixture.variables()).isEqualTo("{'greet':{'text':'hi','count':5}}");
+ then(greeting.text).isEqualTo("ho");
+ then(greeting.count).isEqualTo(3);
+ }
+
@GraphQlClientApi
interface NamedTypeObjectParamApi {
NamedTypeGreeting say(NamedTypeGreeting greet); | ['client/implementation/src/test/java/test/unit/ParametersBehavior.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 687,044 | 134,653 | 19,593 | 225 | 313 | 58 | 8 | 1 | 2,422 | 241 | 602 | 71 | 0 | 4 | 1970-01-01T00:26:54 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,084 | smallrye/smallrye-graphql/1103/1102 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/1102 | https://github.com/smallrye/smallrye-graphql/pull/1103 | https://github.com/smallrye/smallrye-graphql/pull/1103 | 1 | fixes | Maven plugin does not work if implementation-cdi module is in the classpath | Because in that case, it picks up the `io.smallrye.graphql.cdi.CdiLookupService` and tries to use that for injection validation (the `Bootstrap` class validates that all operation classes are beans), but it does not work because the CDI container is not properly initialized. We need to skip injection validation in the Maven plugin.
Workaround: set `test.skip.injection.validation=true` as a system property when executing the plugin | c68ae4a0758e6bd5fe8064fd991141618303b23d | b3ebdc314b15824962f9b7f751a78824fb4087d7 | https://github.com/smallrye/smallrye-graphql/compare/c68ae4a0758e6bd5fe8064fd991141618303b23d...b3ebdc314b15824962f9b7f751a78824fb4087d7 | diff --git a/server/implementation-cdi/src/test/java/io/smallrye/graphql/execution/SchemaTest.java b/server/implementation-cdi/src/test/java/io/smallrye/graphql/execution/SchemaTest.java
index 24a4b48c..8801722d 100644
--- a/server/implementation-cdi/src/test/java/io/smallrye/graphql/execution/SchemaTest.java
+++ b/server/implementation-cdi/src/test/java/io/smallrye/graphql/execution/SchemaTest.java
@@ -9,7 +9,6 @@ import java.util.Scanner;
import org.jboss.jandex.IndexView;
import org.jboss.logging.Logger;
-import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -30,22 +29,14 @@ public class SchemaTest {
@BeforeEach
public void init() {
- // in a unit test we don't have injection available, this is a hack needed to tell the Bootstrap class
- // that it should not verify injection availability
- System.setProperty("test.skip.injection.validation", "true");
IndexView index = Indexer.getTCKIndex();
this.schema = SchemaBuilder.build(index);
assertNotNull(schema);
}
- @AfterEach
- public void skipInjectionValidationCleanup() {
- System.clearProperty("test.skip.injection.validation");
- }
-
@Test
public void testSchemaModelCreation() {
- GraphQLSchema graphQLSchema = Bootstrap.bootstrap(schema);
+ GraphQLSchema graphQLSchema = Bootstrap.bootstrap(schema, false, true);
assertNotNull(graphQLSchema);
String schemaString = new SchemaPrinter().print(graphQLSchema);
assertNotNull(schemaString);
diff --git a/server/implementation/src/main/java/io/smallrye/graphql/bootstrap/Bootstrap.java b/server/implementation/src/main/java/io/smallrye/graphql/bootstrap/Bootstrap.java
index b73fe0b9..565e8e6f 100644
--- a/server/implementation/src/main/java/io/smallrye/graphql/bootstrap/Bootstrap.java
+++ b/server/implementation/src/main/java/io/smallrye/graphql/bootstrap/Bootstrap.java
@@ -99,12 +99,16 @@ public class Bootstrap {
private final ClassloadingService classloadingService = ClassloadingService.get();
public static GraphQLSchema bootstrap(Schema schema) {
- return bootstrap(schema, false);
+ return bootstrap(schema, false, false);
}
public static GraphQLSchema bootstrap(Schema schema, boolean allowMultipleDeployments) {
+ return bootstrap(schema, allowMultipleDeployments, false);
+ }
+
+ public static GraphQLSchema bootstrap(Schema schema, boolean allowMultipleDeployments, boolean skipInjectionValidation) {
if (schema != null && (schema.hasOperations())) {
- Bootstrap bootstrap = new Bootstrap(schema, allowMultipleDeployments);
+ Bootstrap bootstrap = new Bootstrap(schema, allowMultipleDeployments, skipInjectionValidation);
bootstrap.generateGraphQLSchema();
return bootstrap.graphQLSchema;
} else {
@@ -113,10 +117,12 @@ public class Bootstrap {
}
}
- private Bootstrap(Schema schema, boolean allowMultipleDeployments) {
+ private Bootstrap(Schema schema, boolean allowMultipleDeployments, boolean skipInjectionValidation) {
this.schema = schema;
SmallRyeContext.setSchema(schema, allowMultipleDeployments);
- if (!Boolean.getBoolean("test.skip.injection.validation")) {
+ // setting `skipInjectionValidation` through a system property is not recommended,
+ // but kept for backward compatibility for now
+ if (!Boolean.getBoolean("test.skip.injection.validation") && !skipInjectionValidation) {
verifyInjectionIsAvailable();
}
}
diff --git a/server/implementation/src/test/java/io/smallrye/graphql/schema/SchemaTest.java b/server/implementation/src/test/java/io/smallrye/graphql/schema/SchemaTest.java
index 0514d320..40308cfd 100644
--- a/server/implementation/src/test/java/io/smallrye/graphql/schema/SchemaTest.java
+++ b/server/implementation/src/test/java/io/smallrye/graphql/schema/SchemaTest.java
@@ -13,8 +13,6 @@ import java.util.stream.Stream;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.Indexer;
import org.jboss.logging.Logger;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import graphql.schema.GraphQLArgument;
@@ -30,25 +28,13 @@ import io.smallrye.graphql.schema.model.Schema;
class SchemaTest {
private static final Logger LOG = Logger.getLogger(SchemaTest.class.getName());
- @BeforeEach
- public void skipInjectionValidation() {
- // in a unit test we don't have injection available, this is a hack needed to tell the Bootstrap class
- // that it should not verify injection availability
- System.setProperty("test.skip.injection.validation", "true");
- }
-
- @AfterEach
- public void skipInjectionValidationCleanup() {
- System.clearProperty("test.skip.injection.validation");
- }
-
@Test
void testSchemaWithDirectives() {
Schema schema = SchemaBuilder
.build(scan(Directive.class, IntArrayTestDirective.class, FieldDirective.class,
TestTypeWithDirectives.class, DirectivesTestApi.class));
assertNotNull(schema);
- GraphQLSchema graphQLSchema = Bootstrap.bootstrap(schema);
+ GraphQLSchema graphQLSchema = Bootstrap.bootstrap(schema, false, true);
assertNotNull(graphQLSchema);
GraphQLDirective typeDirective = graphQLSchema.getDirective("intArrayTestDirective");
diff --git a/tools/gradle-plugin/plugin/src/main/java/io/smallrye/graphql/gradle/tasks/GenerateSchemaTask.java b/tools/gradle-plugin/plugin/src/main/java/io/smallrye/graphql/gradle/tasks/GenerateSchemaTask.java
index 3aac2b7a..26db9898 100644
--- a/tools/gradle-plugin/plugin/src/main/java/io/smallrye/graphql/gradle/tasks/GenerateSchemaTask.java
+++ b/tools/gradle-plugin/plugin/src/main/java/io/smallrye/graphql/gradle/tasks/GenerateSchemaTask.java
@@ -179,7 +179,6 @@ public class GenerateSchemaTask extends DefaultTask {
@TaskAction
public void generateSchema() throws Exception {
this.config = new GradleConfig(includeScalars, includeDirectives, includeSchemaDefinition, includeIntrospectionTypes);
- System.setProperty("test.skip.injection.validation", "true");
ClassLoader classLoader = getClassLoader();
Thread.currentThread().setContextClassLoader(classLoader);
IndexView index = createIndex();
@@ -242,7 +241,7 @@ public class GenerateSchemaTask extends DefaultTask {
private String generateSchema(IndexView index) {
Schema internalSchema = SchemaBuilder.build(index);
- GraphQLSchema graphQLSchema = Bootstrap.bootstrap(internalSchema);
+ GraphQLSchema graphQLSchema = Bootstrap.bootstrap(internalSchema, false, true);
if(graphQLSchema!=null){
return new SchemaPrinter().print(graphQLSchema);
}
diff --git a/tools/maven-plugin/src/main/java/io/smallrye/graphql/mavenplugin/GenerateSchemaMojo.java b/tools/maven-plugin/src/main/java/io/smallrye/graphql/mavenplugin/GenerateSchemaMojo.java
index 1337a2bf..f838a851 100644
--- a/tools/maven-plugin/src/main/java/io/smallrye/graphql/mavenplugin/GenerateSchemaMojo.java
+++ b/tools/maven-plugin/src/main/java/io/smallrye/graphql/mavenplugin/GenerateSchemaMojo.java
@@ -172,7 +172,7 @@ public class GenerateSchemaMojo extends AbstractMojo {
private String generateSchema(IndexView index) {
Schema internalSchema = SchemaBuilder.build(index, mavenConfig.typeAutoNameStrategy);
- GraphQLSchema graphQLSchema = Bootstrap.bootstrap(internalSchema);
+ GraphQLSchema graphQLSchema = Bootstrap.bootstrap(internalSchema, false, true);
if (graphQLSchema != null) {
return new SchemaPrinter().print(graphQLSchema);
} | ['tools/maven-plugin/src/main/java/io/smallrye/graphql/mavenplugin/GenerateSchemaMojo.java', 'tools/gradle-plugin/plugin/src/main/java/io/smallrye/graphql/gradle/tasks/GenerateSchemaTask.java', 'server/implementation/src/test/java/io/smallrye/graphql/schema/SchemaTest.java', 'server/implementation/src/main/java/io/smallrye/graphql/bootstrap/Bootstrap.java', 'server/implementation-cdi/src/test/java/io/smallrye/graphql/execution/SchemaTest.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 1,124,064 | 221,288 | 32,002 | 344 | 1,383 | 242 | 19 | 3 | 437 | 61 | 91 | 3 | 0 | 0 | 1970-01-01T00:27:14 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,093 | smallrye/smallrye-graphql/453/444 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/444 | https://github.com/smallrye/smallrye-graphql/pull/453 | https://github.com/smallrye/smallrye-graphql/pull/453 | 1 | fix | Non-Ascii-Characters sent to server | When we send a non-ASCII character to the GraphQL server, the encoding is being mixed up.
I've looked at the TCK and tried to add a `ö` to the `ScalarTestApi` string `123456789`, but when I add it to the `output.json` it doesn't accept the json. | 276b731a32e67ab53a14ae4256065f95941a0cc5 | cbcd9313009bf69ab4b33daed475e8d2f13c5c86 | https://github.com/smallrye/smallrye-graphql/compare/276b731a32e67ab53a14ae4256065f95941a0cc5...cbcd9313009bf69ab4b33daed475e8d2f13c5c86 | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
index df002a75..384b73b0 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
@@ -2,6 +2,7 @@ package io.smallrye.graphql.client.typesafe.impl;
import static java.util.stream.Collectors.joining;
import static javax.json.JsonValue.ValueType.ARRAY;
+import static javax.ws.rs.client.Entity.entity;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE;
import static javax.ws.rs.core.Response.Status.Family.SUCCESSFUL;
@@ -14,8 +15,8 @@ import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReaderFactory;
import javax.json.JsonValue;
-import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.StatusType;
@@ -31,6 +32,7 @@ import io.smallrye.graphql.client.typesafe.impl.reflection.TypeInfo;
class GraphQlClientProxy {
private static final Logger log = LoggerFactory.getLogger(GraphQlClientProxy.class);
+ private static final MediaType APPLICATION_JSON_UTF8 = APPLICATION_JSON_TYPE.withCharset("utf-8");
private static final JsonBuilderFactory jsonObjectFactory = Json.createBuilderFactory(null);
private static final JsonReaderFactory jsonReaderFactory = Json.createReaderFactory(null);
@@ -104,9 +106,9 @@ class GraphQlClientProxy {
private String post(String request, MultivaluedMap<String, Object> headers) {
Response response = target
- .request(APPLICATION_JSON_TYPE)
+ .request(APPLICATION_JSON_UTF8)
.headers(headers)
- .post(Entity.json(request));
+ .post(entity(request, APPLICATION_JSON_UTF8));
StatusType status = response.getStatusInfo();
if (status.getFamily() != SUCCESSFUL)
throw new GraphQlClientException("expected successful status code but got " +
diff --git a/client/implementation/src/test/java/test/unit/GraphQlClientFixture.java b/client/implementation/src/test/java/test/unit/GraphQlClientFixture.java
index 434cb81f..26544db1 100644
--- a/client/implementation/src/test/java/test/unit/GraphQlClientFixture.java
+++ b/client/implementation/src/test/java/test/unit/GraphQlClientFixture.java
@@ -1,6 +1,5 @@
package test.unit;
-import static javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE;
import static org.assertj.core.api.BDDAssertions.then;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
@@ -15,6 +14,7 @@ import javax.ws.rs.client.Client;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
@@ -27,14 +27,14 @@ import io.smallrye.graphql.client.typesafe.impl.GraphQlClientBuilderImpl;
class GraphQlClientFixture {
private final Client mockClient = mock(Client.class);
+ private final WebTarget mockWebTarget = mock(WebTarget.class);
private final Invocation.Builder mockInvocationBuilder = mock(Invocation.Builder.class);
private Response response;
+ private Entity<String> entitySent;
GraphQlClientFixture() {
- WebTarget mockWebTarget = mock(WebTarget.class);
-
given(mockClient.target(any(URI.class))).willReturn(mockWebTarget);
- given(mockWebTarget.request(APPLICATION_JSON_TYPE)).willReturn(mockInvocationBuilder);
+ given(mockWebTarget.request(any(MediaType.class))).willReturn(mockInvocationBuilder);
given(mockInvocationBuilder.headers(any())).willReturn(mockInvocationBuilder);
given(mockInvocationBuilder.post(any())).will(i -> response);
}
@@ -66,18 +66,21 @@ class GraphQlClientFixture {
}
String rawQuery() {
- return queryBody(captureRequestEntity(), "query");
+ return queryBody(entitySent().getEntity(), "query");
}
String mutation() {
- return queryBody(captureRequestEntity(), "mutation").replace('\\"', '\\'');
+ return queryBody(entitySent().getEntity(), "mutation").replace('\\"', '\\'');
}
- private String captureRequestEntity() {
- @SuppressWarnings("unchecked")
- ArgumentCaptor<Entity<String>> captor = ArgumentCaptor.forClass(Entity.class);
- BDDMockito.then(mockInvocationBuilder).should().post(captor.capture());
- return captor.getValue().getEntity();
+ private Entity<String> entitySent() {
+ if (entitySent == null) {
+ @SuppressWarnings("unchecked")
+ ArgumentCaptor<Entity<String>> captor = ArgumentCaptor.forClass(Entity.class);
+ BDDMockito.then(mockInvocationBuilder).should().post(captor.capture());
+ entitySent = captor.getValue();
+ }
+ return entitySent;
}
private String queryBody(String response, String operation) {
@@ -100,10 +103,23 @@ class GraphQlClientFixture {
}
MultivaluedMap<String, Object> sentHeaders() {
+ MultivaluedMap<String, Object> map = captureExplicitHeaders();
+ map.putSingle("Accept", captureAcceptHeader());
+ map.putSingle("Content-Type", entitySent().getMediaType());
+ return map;
+ }
+
+ private MultivaluedMap<String, Object> captureExplicitHeaders() {
@SuppressWarnings("unchecked")
ArgumentCaptor<MultivaluedMap<String, Object>> captor = ArgumentCaptor.forClass(MultivaluedMap.class);
BDDMockito.then(mockInvocationBuilder).should().headers(captor.capture());
MultivaluedMap<String, Object> map = captor.getValue();
return (map == null) ? new MultivaluedHashMap<>() : map;
}
+
+ private MediaType captureAcceptHeader() {
+ ArgumentCaptor<MediaType> captor = ArgumentCaptor.forClass(MediaType.class);
+ BDDMockito.then(mockWebTarget).should().request(captor.capture());
+ return captor.getValue();
+ }
}
diff --git a/client/implementation/src/test/java/test/unit/HeaderBehavior.java b/client/implementation/src/test/java/test/unit/HeaderBehavior.java
index be8da39e..2dc58d75 100644
--- a/client/implementation/src/test/java/test/unit/HeaderBehavior.java
+++ b/client/implementation/src/test/java/test/unit/HeaderBehavior.java
@@ -561,4 +561,21 @@ public class HeaderBehavior {
then(fixture.sentHeader("H5")).isEqualTo("V5");
then(fixture.sentHeader("overwrite")).isEqualTo("sub");
}
+
+ @GraphQlClientApi
+ interface SimpleApi {
+ String greeting(String target);
+ }
+
+ @Test
+ public void shouldAddCharsetRequestAndResponseHeaders() {
+ fixture.returnsData("'greeting':'dummy-greeting'");
+ SimpleApi api = fixture.builder().build(SimpleApi.class);
+
+ api.greeting("foo");
+
+ then(fixture.query()).isEqualTo("greeting(target: 'foo')");
+ then(fixture.sentHeader("Content-Type")).hasToString("application/json;charset=utf-8");
+ then(fixture.sentHeader("Accept")).hasToString("application/json;charset=utf-8");
+ }
} | ['client/implementation/src/test/java/test/unit/HeaderBehavior.java', 'client/implementation/src/test/java/test/unit/GraphQlClientFixture.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 548,208 | 107,358 | 15,661 | 184 | 431 | 78 | 8 | 1 | 248 | 45 | 66 | 3 | 0 | 0 | 1970-01-01T00:26:42 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,094 | smallrye/smallrye-graphql/451/450 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/450 | https://github.com/smallrye/smallrye-graphql/pull/451 | https://github.com/smallrye/smallrye-graphql/pull/451 | 1 | fix | ClassCastException with Set as parameter | Hello,
as already discussed with @t1, there is an issue with GraphQl client.
We try to create a GraphQl mutation (GraphQl version 1.0.11) which receives a Set as parameter.
`void add(Set<AnyObjectType> values);`
It seems the RequestBuilder#buildParam() method should not use the specific List type but a more general Collection type? This leads to the following stack:
```
java.lang.ClassCastException: class java.util.ImmutableCollections$SetN cannot be cast to class java.util.List (java.util.ImmutableCollections$SetN and java.util.List are in module java.base of loader 'bootstrap')
at io.smallrye.graphql.client.typesafe.impl.RequestBuilder.buildParam(RequestBuilder.java:48)
at io.smallrye.graphql.client.typesafe.impl.RequestBuilder.appendParam(RequestBuilder.java:39)
at io.smallrye.graphql.client.typesafe.impl.RequestBuilder.build(RequestBuilder.java:30)
at io.smallrye.graphql.client.typesafe.impl.GraphQlClientProxy.request(GraphQlClientProxy.java:60)
at io.smallrye.graphql.client.typesafe.impl.GraphQlClientProxy.invoke(GraphQlClientProxy.java:46)
at io.smallrye.graphql.client.typesafe.impl.GraphQlClientBuilderImpl.lambda$build$0(GraphQlClientBuilderImpl.java:47)
at test.integration.customer.$Proxy26.addMarketingPermissions(Unknown Source)
at test.integration.customer.GraphQlCustomerAT$WhenMarketingPermissions.shouldAddPermissionsForAuthorizedUser(GraphQlCustomerAT.java:117)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.platform.commons.util.ReflectionUtils.invokeMethod(ReflectionUtils.java:688)
at org.junit.jupiter.engine.execution.MethodInvocation.proceed(MethodInvocation.java:60)
at org.junit.jupiter.engine.execution.InvocationInterceptorChain$ValidatingInvocation.proceed(InvocationInterceptorChain.java:131)
at org.junit.jupiter.engine.extension.TimeoutExtension.intercept(TimeoutExtension.java:149)
at org.junit.jupiter.engine.extension.TimeoutExtension.interceptTestableMethod(TimeoutExtension.java:140)
at org.junit.jupiter.engine.extension.TimeoutExtension.interceptTestMethod(TimeoutExtension.java:84)
at org.junit.jupiter.engine.execution.ExecutableInvoker$ReflectiveInterceptorCall.lambda$ofVoidMethod$0(ExecutableInvoker.java:115)
at org.junit.jupiter.engine.execution.ExecutableInvoker.lambda$invoke$0(ExecutableInvoker.java:105)
at org.junit.jupiter.engine.execution.InvocationInterceptorChain$InterceptedInvocation.proceed(InvocationInterceptorChain.java:106)
at org.junit.jupiter.engine.execution.InvocationInterceptorChain.proceed(InvocationInterceptorChain.java:64)
at org.junit.jupiter.engine.execution.InvocationInterceptorChain.chainAndInvoke(InvocationInterceptorChain.java:45)
at org.junit.jupiter.engine.execution.InvocationInterceptorChain.invoke(InvocationInterceptorChain.java:37)
at org.junit.jupiter.engine.execution.ExecutableInvoker.invoke(ExecutableInvoker.java:104)
at org.junit.jupiter.engine.execution.ExecutableInvoker.invoke(ExecutableInvoker.java:98)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.lambda$invokeTestMethod$6(TestMethodTestDescriptor.java:210)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.invokeTestMethod(TestMethodTestDescriptor.java:206)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.execute(TestMethodTestDescriptor.java:131)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.execute(TestMethodTestDescriptor.java:65)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:139)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:129)
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:127)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:126)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:84)
at java.base/java.util.ArrayList.forEach(ArrayList.java:1541)
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:38)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:143)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:129)
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:127)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:126)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:84)
at java.base/java.util.ArrayList.forEach(ArrayList.java:1541)
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:38)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:143)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:129)
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:127)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:126)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:84)
at java.base/java.util.ArrayList.forEach(ArrayList.java:1541)
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:38)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:143)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:129)
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:127)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:126)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:84)
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.submit(SameThreadHierarchicalTestExecutorService.java:32)
at org.junit.platform.engine.support.hierarchical.HierarchicalTestExecutor.execute(HierarchicalTestExecutor.java:57)
at org.junit.platform.engine.support.hierarchical.HierarchicalTestEngine.execute(HierarchicalTestEngine.java:51)
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:108)
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:88)
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.lambda$execute$0(EngineExecutionOrchestrator.java:54)
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.withInterceptedStreams(EngineExecutionOrchestrator.java:67)
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:52)
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:96)
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:75)
at com.intellij.junit5.JUnit5IdeaTestRunner.startRunnerWithArgs(JUnit5IdeaTestRunner.java:69)
at com.intellij.rt.execution.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:47)
at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:242)
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:70)
```
| 5d752a165bd2199c00637accba67cb4c2e85e9ce | 7a864830c7abbe2fff5c7192d32584eafb9073e3 | https://github.com/smallrye/smallrye-graphql/compare/5d752a165bd2199c00637accba67cb4c2e85e9ce...7a864830c7abbe2fff5c7192d32584eafb9073e3 | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
index bdbfe1cf..df002a75 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
@@ -1,6 +1,7 @@
package io.smallrye.graphql.client.typesafe.impl;
import static java.util.stream.Collectors.joining;
+import static javax.json.JsonValue.ValueType.ARRAY;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE;
import static javax.ws.rs.core.Response.Status.Family.SUCCESSFUL;
@@ -8,7 +9,6 @@ import java.io.StringReader;
import java.util.Stack;
import javax.json.Json;
-import javax.json.JsonArray;
import javax.json.JsonBuilderFactory;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
@@ -123,13 +123,15 @@ class GraphQlClientProxy {
private JsonObject readResponse(String request, String response) {
JsonObject responseJson = jsonReaderFactory.createReader(new StringReader(response)).readObject();
- if (responseJson.containsKey("errors") && !isEmpty(responseJson.getJsonArray("errors")))
+ if (hasErrors(responseJson))
throw new GraphQlClientException("errors from service: " + responseJson.getJsonArray("errors") + ":\\n " + request);
return responseJson;
}
- private boolean isEmpty(JsonArray array) {
- return array == null || array.isEmpty();
+ private boolean hasErrors(JsonObject responseJson) {
+ return responseJson.containsKey("errors")
+ && responseJson.get("errors").getValueType() == ARRAY
+ && !responseJson.getJsonArray("errors").isEmpty();
}
private JsonValue getData(MethodInfo method, JsonObject responseJson) {
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
index 4b01beee..338a8732 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
@@ -2,6 +2,9 @@ package io.smallrye.graphql.client.typesafe.impl;
import static java.util.stream.Collectors.toList;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.List;
import io.smallrye.graphql.client.typesafe.api.Header;
@@ -45,11 +48,19 @@ class RequestBuilder {
else if (type.isScalar())
buildScalarParam(type, value);
else if (type.isCollection())
- buildArrayParam(type.getItemType(), (List<?>) value);
+ buildArrayParam(type.getItemType(), asList(value));
else
buildObjectParam(type, value);
}
+ private List<?> asList(Object value) {
+ if (value instanceof List)
+ return (List<?>) value;
+ if (value.getClass().isArray())
+ return Arrays.asList((Object[]) value);
+ return new ArrayList<>((Collection<?>) value);
+ }
+
private void buildScalarParam(TypeInfo type, Object value) {
boolean quoted = !unquoted(type);
if (quoted)
diff --git a/client/implementation/src/test/java/test/unit/ParametersBehavior.java b/client/implementation/src/test/java/test/unit/ParametersBehavior.java
index 5035171b..e5047772 100644
--- a/client/implementation/src/test/java/test/unit/ParametersBehavior.java
+++ b/client/implementation/src/test/java/test/unit/ParametersBehavior.java
@@ -3,8 +3,13 @@ package test.unit;
import static java.util.Arrays.asList;
import static org.assertj.core.api.BDDAssertions.then;
+import java.util.ArrayDeque;
+import java.util.Collection;
+import java.util.HashSet;
import java.util.List;
import java.util.Objects;
+import java.util.Queue;
+import java.util.Set;
import org.junit.jupiter.api.Test;
@@ -135,7 +140,7 @@ public class ParametersBehavior {
@GraphQlClientApi
interface ArrayParamApi {
- boolean greetings(List<String> greets);
+ boolean greetings(String[] greets);
}
@Test
@@ -143,6 +148,70 @@ public class ParametersBehavior {
fixture.returnsData("'greetings':true");
ArrayParamApi api = fixture.builder().build(ArrayParamApi.class);
+ boolean success = api.greetings(new String[] { "hi", "ho" });
+
+ then(fixture.query()).isEqualTo("greetings(greets: ['hi', 'ho'])");
+ then(success).isTrue();
+ }
+
+ @GraphQlClientApi
+ interface ListParamApi {
+ boolean greetings(List<String> greets);
+ }
+
+ @Test
+ public void shouldCallListParamQuery() {
+ fixture.returnsData("'greetings':true");
+ ListParamApi api = fixture.builder().build(ListParamApi.class);
+
+ boolean success = api.greetings(asList("hi", "ho"));
+
+ then(fixture.query()).isEqualTo("greetings(greets: ['hi', 'ho'])");
+ then(success).isTrue();
+ }
+
+ @GraphQlClientApi
+ interface SetParamApi {
+ boolean greetings(Set<String> greets);
+ }
+
+ @Test
+ public void shouldCallSetParamQuery() {
+ fixture.returnsData("'greetings':true");
+ SetParamApi api = fixture.builder().build(SetParamApi.class);
+
+ boolean success = api.greetings(new HashSet<>(asList("hi", "ho")));
+
+ then(fixture.query()).isEqualTo("greetings(greets: ['hi', 'ho'])");
+ then(success).isTrue();
+ }
+
+ @GraphQlClientApi
+ interface QueueParamApi {
+ boolean greetings(Queue<String> greets);
+ }
+
+ @Test
+ public void shouldCallQueueParamQuery() {
+ fixture.returnsData("'greetings':true");
+ QueueParamApi api = fixture.builder().build(QueueParamApi.class);
+
+ boolean success = api.greetings(new ArrayDeque<>(asList("hi", "ho")));
+
+ then(fixture.query()).isEqualTo("greetings(greets: ['hi', 'ho'])");
+ then(success).isTrue();
+ }
+
+ @GraphQlClientApi
+ interface CollectionParamApi {
+ boolean greetings(Collection<String> greets);
+ }
+
+ @Test
+ public void shouldCallCollectionParamQuery() {
+ fixture.returnsData("'greetings':true");
+ CollectionParamApi api = fixture.builder().build(CollectionParamApi.class);
+
boolean success = api.greetings(asList("hi", "ho"));
then(fixture.query()).isEqualTo("greetings(greets: ['hi', 'ho'])");
@@ -150,14 +219,14 @@ public class ParametersBehavior {
}
@GraphQlClientApi
- interface ObjectArrayParamApi {
+ interface ObjectListParamApi {
boolean greetings(List<Greeting> greets);
}
@Test
- public void shouldCallObjectArrayParamQuery() {
+ public void shouldCallObjectListParamQuery() {
fixture.returnsData("'greetings':true");
- ObjectArrayParamApi api = fixture.builder().build(ObjectArrayParamApi.class);
+ ObjectListParamApi api = fixture.builder().build(ObjectListParamApi.class);
boolean success = api.greetings(asList(new Greeting("hi", 5), new Greeting("ho", 3)));
@@ -166,26 +235,26 @@ public class ParametersBehavior {
}
@GraphQlClientApi
- interface ArrayObjectParamApi {
- boolean foo(ArrayObject bar);
+ interface ListObjectParamApi {
+ boolean foo(ListObject bar);
}
- private static class ArrayObject {
+ private static class ListObject {
List<String> texts;
int count;
- ArrayObject(List<String> texts, int count) {
+ ListObject(List<String> texts, int count) {
this.texts = texts;
this.count = count;
}
}
@Test
- public void shouldCallArrayObjectParamQuery() {
+ public void shouldCallListObjectParamQuery() {
fixture.returnsData("'foo':true");
- ArrayObjectParamApi api = fixture.builder().build(ArrayObjectParamApi.class);
+ ListObjectParamApi api = fixture.builder().build(ListObjectParamApi.class);
- boolean success = api.foo(new ArrayObject(asList("hi", "ho"), 3));
+ boolean success = api.foo(new ListObject(asList("hi", "ho"), 3));
then(fixture.query()).isEqualTo("foo(bar: {texts: ['hi', 'ho'], count: 3})");
then(success).isTrue(); | ['client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java', 'client/implementation/src/test/java/test/unit/ParametersBehavior.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 547,749 | 107,269 | 15,648 | 184 | 1,056 | 206 | 23 | 2 | 9,340 | 245 | 1,907 | 97 | 0 | 1 | 1970-01-01T00:26:42 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,095 | smallrye/smallrye-graphql/395/394 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/394 | https://github.com/smallrye/smallrye-graphql/pull/395 | https://github.com/smallrye/smallrye-graphql/pull/395 | 1 | fix | Typesafe Client: send Booleans not as String | E.g. mutations containing a `Boolean` non-primitive results in a String `true` or `false` but not in the proper literals. | 9b81d85734bf837fbeaeb9361034176f887e0920 | 38acf28b7373c3ce0a68acbc137178d181fcfaa2 | https://github.com/smallrye/smallrye-graphql/compare/9b81d85734bf837fbeaeb9361034176f887e0920...38acf28b7373c3ce0a68acbc137178d181fcfaa2 | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
index 3b6b5efb..4b01beee 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java
@@ -61,6 +61,7 @@ class RequestBuilder {
public boolean unquoted(TypeInfo type) {
return type.isPrimitive()
+ || Boolean.class.isAssignableFrom(type.getRawType())
|| Number.class.isAssignableFrom(type.getRawType())
|| type.isEnum();
}
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
index b9cc7915..127e7247 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
@@ -126,6 +126,7 @@ public class TypeInfo {
public boolean isScalar() {
return isPrimitive()
|| Number.class.isAssignableFrom(getRawType())
+ || Boolean.class.isAssignableFrom(getRawType())
|| isEnum()
|| CharSequence.class.isAssignableFrom(getRawType())
|| Character.class.equals(getRawType()) // has a valueOf(char), not valueOf(String)
diff --git a/client/implementation/src/test/java/test/unit/MutationBehavior.java b/client/implementation/src/test/java/test/unit/MutationBehavior.java
index 026cd11c..532da821 100644
--- a/client/implementation/src/test/java/test/unit/MutationBehavior.java
+++ b/client/implementation/src/test/java/test/unit/MutationBehavior.java
@@ -239,4 +239,106 @@ public class MutationBehavior {
then(fixture.mutation()).isEqualTo("say(greeting: {text: 'one', someEnum: ONE}) {text count}");
then(greeting).isEqualTo(new Greeting("ho", 3));
}
+
+ private static class PrimitiveTypesClass {
+ boolean b = true;
+ char c = 'a';
+ byte y = 0x7;
+ short s = 0xff;
+ int i = 123456;
+ long l = 987654321L;
+ float f = 12.34f;
+ double d = 56.78d;
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+ PrimitiveTypesClass that = (PrimitiveTypesClass) o;
+ return this.b == that.b
+ && this.c == that.c
+ && this.y == that.y
+ && this.s == that.s
+ && this.i == that.i
+ && this.l == that.l
+ && this.f == that.f
+ && this.d == that.d;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(b, c, y, s, i, l, f, d);
+ }
+ }
+
+ @GraphQlClientApi
+ interface MutationWithPrimitivesApi {
+ @Mutation
+ String run(PrimitiveTypesClass primitives);
+ }
+
+ @Test
+ public void shouldCallMutationWithPrimitives() {
+ fixture.returnsData("'run':'okay'");
+ MutationWithPrimitivesApi api = fixture.builder().build(MutationWithPrimitivesApi.class);
+
+ String result = api.run(new PrimitiveTypesClass());
+
+ then(fixture.mutation())
+ .isEqualTo("run(primitives: {b: true, c: a, y: 7, s: 255, i: 123456, l: 987654321, f: 12.34, d: 56.78})");
+ then(result).isEqualTo("okay");
+ }
+
+ private static class PrimitiveWrapperTypesClass {
+ Boolean b = true;
+ Character c = 'a';
+ Byte y = 0x7;
+ Short s = 0xff;
+ Integer i = 123456;
+ Long l = 987654321L;
+ Float f = 12.34f;
+ Double d = 56.78d;
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+ PrimitiveWrapperTypesClass that = (PrimitiveWrapperTypesClass) o;
+ return this.b.equals(that.b)
+ && this.c.equals(that.c)
+ && this.y.equals(that.y)
+ && this.s.equals(that.s)
+ && this.i.equals(that.i)
+ && this.l.equals(that.l)
+ && this.f.equals(that.f)
+ && this.d.equals(that.d);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(b, c, y, s, i, l, f, d);
+ }
+ }
+
+ @GraphQlClientApi
+ interface MutationWithPrimitiveWrappersApi {
+ @Mutation
+ String run(PrimitiveWrapperTypesClass primitives);
+ }
+
+ @Test
+ public void shouldCallMutationWithPrimitiveWrappers() {
+ fixture.returnsData("'run':'okay'");
+ MutationWithPrimitiveWrappersApi api = fixture.builder().build(MutationWithPrimitiveWrappersApi.class);
+
+ String result = api.run(new PrimitiveWrapperTypesClass());
+
+ then(fixture.mutation())
+ .isEqualTo("run(primitives: {b: true, c: 'a', y: 7, s: 255, i: 123456, l: 987654321, f: 12.34, d: 56.78})");
+ then(result).isEqualTo("okay");
+ }
}
diff --git a/client/implementation/src/test/java/test/unit/NestedBehavior.java b/client/implementation/src/test/java/test/unit/NestedBehavior.java
index ef6867bd..e072d238 100644
--- a/client/implementation/src/test/java/test/unit/NestedBehavior.java
+++ b/client/implementation/src/test/java/test/unit/NestedBehavior.java
@@ -145,14 +145,16 @@ public class NestedBehavior {
private static class Greeting {
String text;
int code;
+ Boolean successful;
@SuppressWarnings("unused")
Greeting() {
}
- Greeting(String text, int code) {
+ Greeting(String text, int code, Boolean successful) {
this.text = text;
this.code = code;
+ this.successful = successful;
}
@Override
@@ -162,24 +164,24 @@ public class NestedBehavior {
if (o == null || getClass() != o.getClass())
return false;
Greeting greeting = (Greeting) o;
- return code == greeting.code && text.equals(greeting.text);
+ return code == greeting.code && text.equals(greeting.text) && successful == greeting.successful;
}
@Override
public int hashCode() {
- return Objects.hash(text, code);
+ return Objects.hash(text, code, successful);
}
}
@Test
public void shouldCallObjectQuery() {
- fixture.returnsData("'greeting':{'text':'foo','code':5}");
+ fixture.returnsData("'greeting':{'text':'foo','code':5, 'successful':true}");
ObjectApi api = fixture.builder().build(ObjectApi.class);
Greeting greeting = api.greeting();
- then(fixture.query()).isEqualTo("greeting {text code}");
- then(greeting).isEqualTo(new Greeting("foo", 5));
+ then(fixture.query()).isEqualTo("greeting {text code successful}");
+ then(greeting).isEqualTo(new Greeting("foo", 5, true));
}
@Test
@@ -222,15 +224,19 @@ public class NestedBehavior {
@Test
public void shouldCallObjectListQuery() {
- fixture.returnsData("'greetings':[{'text':'a','code':1},{'text':'b','code':2}]");
+ fixture.returnsData("'greetings':["
+ + "{'text':'a','code':1},"
+ + "{'text':'b','code':2,'successful':true},"
+ + "{'text':'c','code':3,'successful':false}]");
ObjectListApi api = fixture.builder().build(ObjectListApi.class);
List<Greeting> greeting = api.greetings();
- then(fixture.query()).isEqualTo("greetings {text code}");
+ then(fixture.query()).isEqualTo("greetings {text code successful}");
then(greeting).containsExactly(
- new Greeting("a", 1),
- new Greeting("b", 2));
+ new Greeting("a", 1, null),
+ new Greeting("b", 2, true),
+ new Greeting("c", 3, false));
}
@Test
@@ -318,9 +324,9 @@ public class NestedBehavior {
GreetingContainer container = api.container();
- then(fixture.query()).isEqualTo("container {greeting {text code} count}");
+ then(fixture.query()).isEqualTo("container {greeting {text code successful} count}");
then(container).isEqualTo(new GreetingContainer(
- new Greeting("a", 1), 3));
+ new Greeting("a", 1, null), 3));
}
@GraphQlClientApi
@@ -366,9 +372,9 @@ public class NestedBehavior {
GreetingsContainer container = api.container();
- then(fixture.query()).isEqualTo("container {greetings {text code} count}");
+ then(fixture.query()).isEqualTo("container {greetings {text code successful} count}");
then(container).isEqualTo(new GreetingsContainer(
- asList(new Greeting("a", 1), new Greeting("b", 2)), 3));
+ asList(new Greeting("a", 1, null), new Greeting("b", 2, null)), 3));
}
@GraphQlClientApi
@@ -442,9 +448,9 @@ public class NestedBehavior {
WrappedGreetingContainer container = api.container();
- then(fixture.query()).isEqualTo("container {greeting {value {text code}} count}");
+ then(fixture.query()).isEqualTo("container {greeting {value {text code successful}} count}");
then(container).isEqualTo(new WrappedGreetingContainer(
- new Wrapper<>(new Greeting("a", 1)), 3));
+ new Wrapper<>(new Greeting("a", 1, null)), 3));
}
@GraphQlClientApi
@@ -611,8 +617,8 @@ public class NestedBehavior {
Sub sub = api.call();
- then(fixture.query()).isEqualTo("call {greeting {text code} count}");
- then(sub.greeting).isEqualTo(new Greeting("a", 1));
+ then(fixture.query()).isEqualTo("call {greeting {text code successful} count}");
+ then(sub.greeting).isEqualTo(new Greeting("a", 1, null));
then(sub.count).isEqualTo(3);
}
diff --git a/client/implementation/src/test/java/test/unit/ParametersBehavior.java b/client/implementation/src/test/java/test/unit/ParametersBehavior.java
index 064c83f4..5035171b 100644
--- a/client/implementation/src/test/java/test/unit/ParametersBehavior.java
+++ b/client/implementation/src/test/java/test/unit/ParametersBehavior.java
@@ -57,10 +57,26 @@ public class ParametersBehavior {
}
@GraphQlClientApi
- interface BooleanParamApi {
+ interface BoolParamApi {
String greeting(boolean really);
}
+ @Test
+ public void shouldCallBoolParamQuery() {
+ fixture.returnsData("'greeting':'ho'");
+ BoolParamApi api = fixture.builder().build(BoolParamApi.class);
+
+ String greeting = api.greeting(true);
+
+ then(fixture.query()).isEqualTo("greeting(really: true)");
+ then(greeting).isEqualTo("ho");
+ }
+
+ @GraphQlClientApi
+ interface BooleanParamApi {
+ String greeting(Boolean really);
+ }
+
@Test
public void shouldCallBooleanParamQuery() {
fixture.returnsData("'greeting':'ho'"); | ['client/implementation/src/test/java/test/unit/MutationBehavior.java', 'client/implementation/src/test/java/test/unit/ParametersBehavior.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/RequestBuilder.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java', 'client/implementation/src/test/java/test/unit/NestedBehavior.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 529,935 | 103,861 | 15,198 | 181 | 133 | 20 | 2 | 2 | 121 | 19 | 30 | 1 | 0 | 0 | 1970-01-01T00:26:39 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,092 | smallrye/smallrye-graphql/487/486 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/486 | https://github.com/smallrye/smallrye-graphql/pull/487 | https://github.com/smallrye/smallrye-graphql/pull/487 | 1 | fixes | Servlet module does not scan WEB-INF/lib JARs for MP GraphQL components | If annotated POJOs are located in a WAR's WEB-INF/classes directory, then everything works fine. But when running in Open Liberty with a WAR file that contains query/mutation/entity classes, etc in a JAR file in the WEB-INF/lib directory, those classes are not processed. Since these JARs are part of the classpath of the web application, they should also be scanned for MP GraphQL components. | 7ae2014bef6f8f808aecfc7e2a1d8d36f1b758e6 | 8a3aac8e1d9bd55986a26b616e7d1009ba82fbb9 | https://github.com/smallrye/smallrye-graphql/compare/7ae2014bef6f8f808aecfc7e2a1d8d36f1b758e6...8a3aac8e1d9bd55986a26b616e7d1009ba82fbb9 | diff --git a/server/implementation-servlet/src/main/java/io/smallrye/graphql/servlet/IndexInitializer.java b/server/implementation-servlet/src/main/java/io/smallrye/graphql/servlet/IndexInitializer.java
index fe13a935..272751c4 100644
--- a/server/implementation-servlet/src/main/java/io/smallrye/graphql/servlet/IndexInitializer.java
+++ b/server/implementation-servlet/src/main/java/io/smallrye/graphql/servlet/IndexInitializer.java
@@ -133,7 +133,7 @@ public class IndexInitializer {
if (fileName.endsWith(DOT_CLASS)) {
SmallRyeGraphQLServletLogging.log.processingFile(fileName);
indexer.index(is);
- } else if (fileName.endsWith(DOT_WAR)) {
+ } else if (fileName.endsWith(DOT_WAR) || fileName.endsWith(DOT_JAR)) {
// necessary because of the thorntail arquillian adapter
processJar(is, indexer);
} | ['server/implementation-servlet/src/main/java/io/smallrye/graphql/servlet/IndexInitializer.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 551,449 | 108,013 | 15,717 | 184 | 129 | 34 | 2 | 1 | 394 | 63 | 85 | 1 | 0 | 0 | 1970-01-01T00:26:43 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,098 | smallrye/smallrye-graphql/333/332 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/332 | https://github.com/smallrye/smallrye-graphql/pull/333 | https://github.com/smallrye/smallrye-graphql/pull/333 | 1 | fix | Typesafe client: inherit `@Header`/`@AuthorizationHeader` annotations | Annotations on the parent of a `@GraphQlClientApi` interface are not recognized. | c179e7bae6371a5ef258aaabe52946274693b33c | 16ddf61f0ef7c27b27cdaf903450c4a4e529d003 | https://github.com/smallrye/smallrye-graphql/compare/c179e7bae6371a5ef258aaabe52946274693b33c...16ddf61f0ef7c27b27cdaf903450c4a4e529d003 | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java
index b0674489..e27f5bd3 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java
@@ -42,7 +42,7 @@ public class GraphQlClientBuilderImpl implements GraphQlClientBuilder {
WebTarget webTarget = client.target(resolveEndpoint(apiClass));
GraphQlClientProxy graphQlClient = new GraphQlClientProxy(webTarget);
return apiClass.cast(Proxy.newProxyInstance(apiClass.getClassLoader(), new Class<?>[] { apiClass },
- (proxy, method, args) -> graphQlClient.invoke(MethodInfo.of(method, args))));
+ (proxy, method, args) -> graphQlClient.invoke(apiClass, MethodInfo.of(method, args))));
}
private void readConfig(GraphQlClientApi annotation) {
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
index c6e3b6a2..bd200d63 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java
@@ -41,8 +41,8 @@ class GraphQlClientProxy {
this.target = target;
}
- Object invoke(MethodInfo method) {
- MultivaluedMap<String, Object> headers = new HeaderBuilder(method).build();
+ Object invoke(Class<?> api, MethodInfo method) {
+ MultivaluedMap<String, Object> headers = new HeaderBuilder(api, method).build();
String request = request(method);
log.info("request graphql: {}", request);
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/HeaderBuilder.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/HeaderBuilder.java
index 84ff32fa..85d7d721 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/HeaderBuilder.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/HeaderBuilder.java
@@ -20,14 +20,16 @@ import io.smallrye.graphql.client.typesafe.impl.reflection.MethodResolver;
import io.smallrye.graphql.client.typesafe.impl.reflection.TypeInfo;
public class HeaderBuilder {
+ private final Class<?> api;
private final MethodInfo method;
- public HeaderBuilder(MethodInfo method) {
+ public HeaderBuilder(Class<?> api, MethodInfo method) {
+ this.api = api;
this.method = method;
}
public MultivaluedMap<String, Object> build() {
- MultivaluedMap<String, Object> headers = method.getResolvedAnnotations(Header.class)
+ MultivaluedMap<String, Object> headers = method.getResolvedAnnotations(api, Header.class)
.map(header -> new SimpleEntry<>(header.name(), resolveValue(header)))
.collect(toMultivaluedMap());
method.parameters()
@@ -36,7 +38,7 @@ public class HeaderBuilder {
Header header = parameter.getAnnotations(Header.class)[0];
headers.add(header.name(), parameter.getValue());
});
- method.getResolvedAnnotations(AuthorizationHeader.class)
+ method.getResolvedAnnotations(api, AuthorizationHeader.class)
.findFirst()
.map(header -> resolveAuthHeader(method.getDeclaringType(), header))
.ifPresent(auth -> headers.add("Authorization", auth));
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/MethodInfo.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/MethodInfo.java
index 71a6d0c2..4b5dbbab 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/MethodInfo.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/MethodInfo.java
@@ -104,12 +104,20 @@ public class MethodInfo {
return type;
}
- public <A extends Annotation> Stream<A> getResolvedAnnotations(Class<A> type) {
+ public <A extends Annotation> Stream<A> getResolvedAnnotations(Class<?> declaring, Class<A> type) {
return Stream.concat(resolveAnnotations(method, type),
- resolveAnnotations(method.getDeclaringClass(), type))
+ resolveInheritedAnnotations(declaring, type))
.filter(Objects::nonNull);
}
+ private <A extends Annotation> Stream<A> resolveInheritedAnnotations(Class<?> declaring, Class<A> type) {
+ Stream<A> stream = resolveAnnotations(declaring, type);
+ for (Class<?> i : declaring.getInterfaces()) {
+ stream = Stream.concat(stream, resolveInheritedAnnotations(i, type));
+ }
+ return stream;
+ }
+
private static <A extends Annotation> Stream<A> resolveAnnotations(AnnotatedElement annotatedElement, Class<A> type) {
return Stream.concat(Stream.of(annotatedElement.getAnnotationsByType(type)),
resolveStereotypes(annotatedElement.getAnnotations(), type));
diff --git a/client/implementation/src/test/java/test/unit/AuthorizationHeaderBehavior.java b/client/implementation/src/test/java/test/unit/AuthorizationHeaderBehavior.java
index d062450d..eed17cdb 100644
--- a/client/implementation/src/test/java/test/unit/AuthorizationHeaderBehavior.java
+++ b/client/implementation/src/test/java/test/unit/AuthorizationHeaderBehavior.java
@@ -85,7 +85,10 @@ public class AuthorizationHeaderBehavior {
@GraphQlClientApi
@AuthorizationHeader(confPrefix = "*")
- public interface InheritedAuthorizationHeadersApi {
+ public interface InheritedAuthorizationHeadersApi extends BaseAuthorizationHeadersApi {
+ }
+
+ public interface BaseAuthorizationHeadersApi {
@SuppressWarnings("UnusedReturnValue")
String greeting();
}
@@ -104,7 +107,7 @@ public class AuthorizationHeaderBehavior {
@GraphQlClientApi(configKey = "foo")
@AuthorizationHeader
- public interface InheritedConfigKeyAuthorizationHeadersApi {
+ public interface ConfigKeyAuthorizationHeadersApi {
@SuppressWarnings("UnusedReturnValue")
String greeting();
}
@@ -113,8 +116,8 @@ public class AuthorizationHeaderBehavior {
public void shouldAddInheritedConfigKeyAuthorizationHeader() {
withCredentials("foo/mp-graphql/", () -> {
fixture.returnsData("'greeting':'dummy-greeting'");
- InheritedConfigKeyAuthorizationHeadersApi api = fixture.builder()
- .build(InheritedConfigKeyAuthorizationHeadersApi.class);
+ ConfigKeyAuthorizationHeadersApi api = fixture.builder()
+ .build(ConfigKeyAuthorizationHeadersApi.class);
api.greeting();
@@ -136,7 +139,7 @@ public class AuthorizationHeaderBehavior {
}
@Test
- public void shouldAddAuthenticatedHeader() {
+ public void shouldAddStereotypedHeader() {
withCredentials("", () -> {
fixture.returnsData("'greeting':'dummy-greeting'");
AuthenticatedHeaderApi api = fixture.builder().build(AuthenticatedHeaderApi.class);
diff --git a/client/implementation/src/test/java/test/unit/HeaderBehavior.java b/client/implementation/src/test/java/test/unit/HeaderBehavior.java
index 0d66e5f1..be8da39e 100644
--- a/client/implementation/src/test/java/test/unit/HeaderBehavior.java
+++ b/client/implementation/src/test/java/test/unit/HeaderBehavior.java
@@ -525,4 +525,40 @@ public class HeaderBehavior {
then(fixture.sentHeader("H")).isEqualTo("V");
}
+
+ @GraphQlClientApi
+ @Header(name = "H1", constant = "V1")
+ @Header(name = "overwrite", constant = "sub")
+ interface InheritingHeadersApi extends Base, SideBase {
+ }
+
+ @Header(name = "H2", constant = "V2")
+ @Header(name = "overwrite", constant = "super")
+ interface Base extends SuperBase {
+ @Header(name = "H3", constant = "V3")
+ String greeting();
+ }
+
+ @Header(name = "H4", constant = "V4")
+ interface SideBase {
+ }
+
+ @Header(name = "H5", constant = "V5")
+ interface SuperBase {
+ }
+
+ @Test
+ public void shouldAddExtendedHeaders() {
+ fixture.returnsData("'greeting':'dummy-greeting'");
+ InheritingHeadersApi api = fixture.builder().build(InheritingHeadersApi.class);
+
+ api.greeting();
+
+ then(fixture.sentHeader("H1")).isEqualTo("V1");
+ then(fixture.sentHeader("H2")).isEqualTo("V2");
+ then(fixture.sentHeader("H3")).isEqualTo("V3");
+ then(fixture.sentHeader("H4")).isEqualTo("V4");
+ then(fixture.sentHeader("H5")).isEqualTo("V5");
+ then(fixture.sentHeader("overwrite")).isEqualTo("sub");
+ }
} | ['client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientBuilderImpl.java', 'client/implementation/src/test/java/test/unit/AuthorizationHeaderBehavior.java', 'client/implementation/src/test/java/test/unit/HeaderBehavior.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/HeaderBuilder.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/MethodInfo.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/GraphQlClientProxy.java'] | {'.java': 6} | 6 | 6 | 0 | 0 | 6 | 496,929 | 97,295 | 14,352 | 178 | 1,644 | 332 | 26 | 4 | 80 | 11 | 19 | 1 | 0 | 0 | 1970-01-01T00:26:35 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
2,090 | smallrye/smallrye-graphql/548/545 | smallrye | smallrye-graphql | https://github.com/smallrye/smallrye-graphql/issues/545 | https://github.com/smallrye/smallrye-graphql/pull/548 | https://github.com/smallrye/smallrye-graphql/pull/548 | 1 | fix | Graphql client handle UUID | Hi, I am writing tests for my graphql endpoint like follows
Endpoint
```java
package graphql
import types.Organization;
import io.quarkus.security.Authenticated;
@GraphQLApi
public class OrganizationResource {
@Authenticated
@Query
public Organization organization(@NonNull UUID id) {
return Organization.builder().id(id).value("test").build()
}
}
```
Graphql type
```java
package types;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import java.util.UUID;
@AllArgsConstructor
@Data
@Builder
@JsonIgnoreProperties(ignoreUnknown = true)
public class Organization {
private UUID id;
private String value;
}
```
Graphql client
```java
@GraphQlClientApi
interface Client {
Organization organization(@NonNull @Name("id") String id);
}
```
Test
```java
public class OrganizationResourceTest {
@Test
public void testOrganizationQuery() {
var id = UUID.randomUUID()
var response = graphql.organization(organization.getId().toString());
assertEquals(id, response.getId());
}
}
```
but I receive this error
```
io.smallrye.graphql.client.typesafe.api.GraphQlClientException: errors from service:
- {"message":"Validation error of type SubSelectionNotAllowed: Sub selection not allowed on leaf type String of field id @ 'organization/id'","locations":[{"line":1,"column":59}],"extensions":{"description":"Sub selection not allowed on leaf type String of field id","validationErrorType":"SubSelectionNotAllowed","queryPath":["organization","id"],"classification":"ValidationError"}}
query: query organization($id: String!) { organization(id: $id) {id {mostSigBits leastSigBits} value } }
```
It seems like the client is not mapping the UUID field to a String field which is how the graphql endpoint is returning fields of that type.
Any suggestion on how to handle this case?
Version: smallrye-graphql-client : 1.0.15
| f1a58345a19344ed6487d0ba92aca9e30348eaed | 4b8e47a19d723c6d4af339c27acda2fd78b0add2 | https://github.com/smallrye/smallrye-graphql/compare/f1a58345a19344ed6487d0ba92aca9e30348eaed...4b8e47a19d723c6d4af339c27acda2fd78b0add2 | diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java
index 3364d70a..24335f2d 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java
@@ -27,6 +27,8 @@ class JsonStringReader extends Reader<JsonString> {
if (java.util.Date.class.equals(this.type.getRawType()))
return java.util.Date.from(Instant.parse(value.getString()));
+ if (java.util.UUID.class.equals(this.type.getRawType()))
+ return java.util.UUID.fromString(value.getString());
ConstructionInfo constructor = type.scalarConstructor()
.orElseThrow(() -> new GraphQlClientValueException(location, value));
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
index 6a1ddf9a..418ad1e6 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java
@@ -63,6 +63,7 @@ public class ParameterInfo {
case "String":
case "char":
case "Character":
+ case "UUID":
return "String";
case "boolean":
case "Boolean":
diff --git a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
index 6b9cbbcf..1173d8ed 100644
--- a/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
+++ b/client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java
@@ -126,7 +126,12 @@ public class TypeInfo {
}
private boolean isGraphQlField(Field field) {
- return !isStatic(field.getModifiers()) && !isTransient(field.getModifiers());
+ return !isStatic(field.getModifiers()) && !isSynthetic(field.getModifiers()) && !isTransient(field.getModifiers());
+ }
+
+ /** Modifier.isSynthetic is package private */
+ private static boolean isSynthetic(int mod) {
+ return (mod & 0x00001000) != 0;
}
public boolean isOptional() {
@@ -146,6 +151,7 @@ public class TypeInfo {
|| CharSequence.class.isAssignableFrom(getRawType())
|| Character.class.equals(getRawType()) // has a valueOf(char), not valueOf(String)
|| java.util.Date.class.equals(getRawType())
+ || java.util.UUID.class.equals(getRawType())
|| scalarConstructor().isPresent();
}
diff --git a/client/implementation/src/test/java/test/unit/ScalarBehavior.java b/client/implementation/src/test/java/test/unit/ScalarBehavior.java
index 5a763b11..53047498 100644
--- a/client/implementation/src/test/java/test/unit/ScalarBehavior.java
+++ b/client/implementation/src/test/java/test/unit/ScalarBehavior.java
@@ -15,6 +15,7 @@ import java.time.OffsetTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Date;
+import java.util.UUID;
import org.eclipse.microprofile.graphql.Id;
import org.eclipse.microprofile.graphql.NonNull;
@@ -581,14 +582,26 @@ class ScalarBehavior {
@GraphQlClientApi
interface IdApi {
@Id
- String idea(@Id String in);
+ String idea(
+ @Id String stringId,
+ @Id long primitiveLongId,
+ @Id int primitiveIntId,
+ @Id Long longId,
+ @Id Integer intId,
+ @Id UUID uuidId);
}
@GraphQlClientApi
interface NonNullIdApi {
@NonNull
@Id
- String idea(@NonNull @Id String in);
+ String idea(
+ @NonNull @Id String stringId,
+ @NonNull @Id long primitiveLongId,
+ @NonNull @Id int primitiveIntId,
+ @NonNull @Id Long longId,
+ @NonNull @Id Integer intId,
+ @NonNull @Id UUID uuidId);
}
@GraphQlClientApi
@@ -706,9 +719,22 @@ class ScalarBehavior {
fixture.returnsData("'idea':'out'");
IdApi api = fixture.builder().build(IdApi.class);
- String out = api.idea("in");
-
- then(fixture.query()).isEqualTo("query idea($in: ID) { idea(in: $in) }");
+ String out = api.idea("stringId", 1L, 2, 3L, 4, UUID.randomUUID());
+
+ then(fixture.query()).isEqualTo("query idea(" +
+ "$stringId: ID, " +
+ "$primitiveLongId: ID!, " +
+ "$primitiveIntId: ID!, " +
+ "$longId: ID, " +
+ "$intId: ID, " +
+ "$uuidId: ID) " +
+ "{ idea(" +
+ "stringId: $stringId, " +
+ "primitiveLongId: $primitiveLongId, " +
+ "primitiveIntId: $primitiveIntId, " +
+ "longId: $longId, " +
+ "intId: $intId, " +
+ "uuidId: $uuidId) }");
then(out).isEqualTo("out");
}
@@ -717,9 +743,22 @@ class ScalarBehavior {
fixture.returnsData("'idea':'out'");
NonNullIdApi api = fixture.builder().build(NonNullIdApi.class);
- String out = api.idea("in");
-
- then(fixture.query()).isEqualTo("query idea($in: ID!) { idea(in: $in) }");
+ String out = api.idea("stringId", 1L, 2, 3L, 4, UUID.randomUUID());
+
+ then(fixture.query()).isEqualTo("query idea(" +
+ "$stringId: ID!, " +
+ "$primitiveLongId: ID!, " +
+ "$primitiveIntId: ID!, " +
+ "$longId: ID!, " +
+ "$intId: ID!, " +
+ "$uuidId: ID!) " +
+ "{ idea(" +
+ "stringId: $stringId, " +
+ "primitiveLongId: $primitiveLongId, " +
+ "primitiveIntId: $primitiveIntId, " +
+ "longId: $longId, " +
+ "intId: $intId, " +
+ "uuidId: $uuidId) }");
then(out).isEqualTo("out");
}
@@ -1015,4 +1054,53 @@ class ScalarBehavior {
then(value).isEqualTo(Date.from(out));
}
}
+
+ @GraphQlClientApi
+ interface UuidApi {
+ UUID foo(UUID uuid, @Id UUID id);
+ }
+
+ @GraphQlClientApi
+ interface NestedUuidIdApi {
+ NestedUuidId foo(NestedUuidId uuid);
+ }
+
+ static class NestedUuidId {
+ @Id
+ UUID id;
+ }
+
+ @Nested
+ class UuidBehavior {
+ @Test
+ void shouldCallUuidQuery() {
+ UUID in1 = UUID.randomUUID();
+ UUID in2 = UUID.randomUUID();
+ UUID out = UUID.randomUUID();
+ fixture.returnsData("'foo':'" + out + "'");
+ UuidApi api = fixture.build(UuidApi.class);
+
+ UUID value = api.foo(in1, in2);
+
+ then(fixture.query()).isEqualTo("query foo($uuid: String, $id: ID) { foo(uuid: $uuid, id: $id) }");
+ then(fixture.variables()).isEqualTo("{'uuid':'" + in1 + "','id':'" + in2 + "'}");
+ then(value).isEqualTo(out);
+ }
+
+ @Test
+ void shouldCallNestedUuidIdQuery() {
+ NestedUuidId in = new NestedUuidId();
+ in.id = UUID.randomUUID();
+ NestedUuidId out = new NestedUuidId();
+ out.id = UUID.randomUUID();
+ fixture.returnsData("'foo':{'id':'" + out.id + "'}");
+ NestedUuidIdApi api = fixture.build(NestedUuidIdApi.class);
+
+ NestedUuidId value = api.foo(in);
+
+ then(fixture.query()).isEqualTo("query foo($uuid: NestedUuidIdInput) { foo(uuid: $uuid) {id} }");
+ then(fixture.variables()).isEqualTo("{'uuid':{'id':'" + in.id + "'}}");
+ then(value.id).isEqualTo(out.id);
+ }
+ }
} | ['client/implementation/src/test/java/test/unit/ScalarBehavior.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/json/JsonStringReader.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/TypeInfo.java', 'client/implementation/src/main/java/io/smallrye/graphql/client/typesafe/impl/reflection/ParameterInfo.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 607,967 | 119,083 | 17,353 | 202 | 582 | 120 | 11 | 3 | 2,054 | 200 | 422 | 73 | 0 | 5 | 1970-01-01T00:26:46 | 132 | Java | {'Java': 2081470, 'JavaScript': 4025, 'Shell': 1757, 'Kotlin': 1745, 'HTML': 1357, 'CSS': 737} | Apache License 2.0 |
497 | gluufederation/oxtrust/1054/1053 | gluufederation | oxtrust | https://github.com/GluuFederation/oxTrust/issues/1053 | https://github.com/GluuFederation/oxTrust/pull/1054 | https://github.com/GluuFederation/oxTrust/pull/1054 | 1 | fix | Fix duplicate source server name: | # Description
The fix is already in master branch just need to move it to 3.1.4 as that is required for the next release
The corresponding issue was here https://github.com/GluuFederation/oxTrust/issues/1043. | 83ce8d6f0bce2317eca50640ed2555743f71d1e4 | 664eeed5b50503602a462364b1e12b1ebbe59325 | https://github.com/gluufederation/oxtrust/compare/83ce8d6f0bce2317eca50640ed2555743f71d1e4...664eeed5b50503602a462364b1e12b1ebbe59325 | diff --git a/server/src/main/java/org/gluu/oxtrust/action/ManagePersonAuthenticationAction.java b/server/src/main/java/org/gluu/oxtrust/action/ManagePersonAuthenticationAction.java
index 8faa9ef45..7918e6e68 100644
--- a/server/src/main/java/org/gluu/oxtrust/action/ManagePersonAuthenticationAction.java
+++ b/server/src/main/java/org/gluu/oxtrust/action/ManagePersonAuthenticationAction.java
@@ -278,22 +278,17 @@ public class ManagePersonAuthenticationAction
public boolean updateAuthConf(GluuAppliance appliance) {
try {
- String configId = null;
List<OxIDPAuthConf> idpConf = new ArrayList<OxIDPAuthConf>();
for (GluuLdapConfiguration ldapConfig : this.sourceConfigs) {
- if (idpConf.isEmpty()) {
- configId = ldapConfig.getConfigId();
- }
if (ldapConfig.isUseAnonymousBind()) {
ldapConfig.setBindDN(null);
}
OxIDPAuthConf ldapConfigIdpAuthConf = new OxIDPAuthConf();
- ldapConfig.setConfigId(configId);
ldapConfig.updateStringsLists();
ldapConfigIdpAuthConf.setType("auth");
ldapConfigIdpAuthConf.setVersion(ldapConfigIdpAuthConf.getVersion() + 1);
- ldapConfigIdpAuthConf.setName(configId);
+ ldapConfigIdpAuthConf.setName(ldapConfig.getConfigId());
ldapConfigIdpAuthConf.setEnabled(ldapConfig.isEnabled());
ldapConfigIdpAuthConf.setConfig(objectToJson(ldapConfig));
| ['server/src/main/java/org/gluu/oxtrust/action/ManagePersonAuthenticationAction.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 2,212,091 | 498,520 | 68,979 | 413 | 254 | 65 | 7 | 1 | 217 | 30 | 53 | 6 | 1 | 0 | 1970-01-01T00:25:32 | 127 | Java | {'Java': 2357692, 'HTML': 870626, 'JavaScript': 658438, 'CSS': 538961, 'SCSS': 28392, 'Handlebars': 5806, 'Python': 3149, 'Ruby': 1078} | MIT License |
1,385 | smallrye/smallrye-config/177/176 | smallrye | smallrye-config | https://github.com/smallrye/smallrye-config/issues/176 | https://github.com/smallrye/smallrye-config/pull/177 | https://github.com/smallrye/smallrye-config/pull/177 | 2 | fixes | 1.3.10 started returning Optional[false] for non existent boolean properties | In 1.3.9, `config.getOptionalValue("NON_EXISTENT_PROPERTY", Boolean.class)`, returns `Optional.empty`
In 1.3.10, the same call started returning `Optional[false]` instead. This is a backward incompatible change in behavior and is not consistent with the behavior for types other than boolean, for which this call still returns `Optional.empty`. | 7b2258f9f7b494252b2db6d4dcb6d9428d44ff66 | bc2ab1199ecba8f9212a4a1197cfcdee90961cc2 | https://github.com/smallrye/smallrye-config/compare/7b2258f9f7b494252b2db6d4dcb6d9428d44ff66...bc2ab1199ecba8f9212a4a1197cfcdee90961cc2 | diff --git a/implementation/src/main/java/io/smallrye/config/Converters.java b/implementation/src/main/java/io/smallrye/config/Converters.java
index 6ffc789b..f5bac62e 100644
--- a/implementation/src/main/java/io/smallrye/config/Converters.java
+++ b/implementation/src/main/java/io/smallrye/config/Converters.java
@@ -55,7 +55,7 @@ public final class Converters {
@SuppressWarnings("unchecked")
static final Converter<Boolean> BOOLEAN_CONVERTER = BuiltInConverter.of(1, (Converter & Serializable) value -> {
- if (value != null) {
+ if (value != null && !value.isEmpty()) {
return "TRUE".equalsIgnoreCase(value)
|| "1".equalsIgnoreCase(value)
|| "YES".equalsIgnoreCase(value)
@@ -72,19 +72,19 @@ public final class Converters {
@SuppressWarnings("unchecked")
static final Converter<Double> DOUBLE_CONVERTER = BuiltInConverter.of(2,
- (Converter & Serializable) value -> value != null ? Double.valueOf(value) : null);
+ (Converter & Serializable) value -> value != null && !value.isEmpty() ? Double.valueOf(value) : null);
@SuppressWarnings("unchecked")
static final Converter<Float> FLOAT_CONVERTER = BuiltInConverter.of(3,
- (Converter & Serializable) value -> value != null ? Float.valueOf(value) : null);
+ (Converter & Serializable) value -> value != null && !value.isEmpty() ? Float.valueOf(value) : null);
@SuppressWarnings("unchecked")
static final Converter<Long> LONG_CONVERTER = BuiltInConverter.of(4,
- (Converter & Serializable) value -> value != null ? Long.valueOf(value) : null);
+ (Converter & Serializable) value -> value != null && !value.isEmpty() ? Long.valueOf(value) : null);
@SuppressWarnings("unchecked")
static final Converter<Integer> INTEGER_CONVERTER = BuiltInConverter.of(5,
- (Converter & Serializable) value -> value != null ? Integer.valueOf(value) : null);
+ (Converter & Serializable) value -> value != null && !value.isEmpty() ? Integer.valueOf(value) : null);
@SuppressWarnings("unchecked")
static final Converter<Class<?>> CLASS_CONVERTER = BuiltInConverter.of(6, (Converter & Serializable) value -> {
@@ -121,7 +121,7 @@ public final class Converters {
@SuppressWarnings("unchecked")
static final Converter<Character> CHARACTER_CONVERTER = BuiltInConverter.of(11, (Converter & Serializable) value -> {
- if (value != null) {
+ if (value != null && !value.isEmpty()) {
if (value.length() == 1) {
return Character.valueOf(value.charAt(0));
}
diff --git a/implementation/src/test/java/io/smallrye/config/ConvertersTestCase.java b/implementation/src/test/java/io/smallrye/config/ConvertersTestCase.java
index ab5e63e6..add365a5 100644
--- a/implementation/src/test/java/io/smallrye/config/ConvertersTestCase.java
+++ b/implementation/src/test/java/io/smallrye/config/ConvertersTestCase.java
@@ -26,6 +26,9 @@ import java.util.Collection;
import java.util.Collections;
import java.util.NoSuchElementException;
import java.util.Optional;
+import java.util.OptionalDouble;
+import java.util.OptionalInt;
+import java.util.OptionalLong;
import org.eclipse.microprofile.config.spi.Converter;
import org.junit.Test;
@@ -203,6 +206,103 @@ public class ConvertersTestCase {
assertEquals(LocalDate.of(1950, 1, 1), config.getValue("when", dateConv4));
}
+ @Test
+ public void testEmpty() {
+ SmallRyeConfig config = buildConfig("int.key", "1234", "boolean.key", "true", "empty.key", "");
+ assertTrue(config.getOptionalValue("int.key", Integer.class).isPresent());
+ assertEquals(1234, config.getOptionalValue("int.key", Integer.class).get().intValue());
+ assertTrue(config.getValue("int.key", OptionalInt.class).isPresent());
+ assertFalse(config.getValue("int.missing.key", OptionalInt.class).isPresent());
+ assertFalse(config.getValue("empty.key", OptionalInt.class).isPresent());
+ assertEquals(1234, config.getValue("int.key", OptionalInt.class).getAsInt());
+ assertFalse(config.getOptionalValue("int.missing.key", Integer.class).isPresent());
+ assertFalse(config.getOptionalValue("empty.key", Integer.class).isPresent());
+
+ try {
+ config.getValue("empty.key", Integer.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+ try {
+ config.getValue("int.missing.key", Integer.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+
+ assertTrue(config.getOptionalValue("int.key", Long.class).isPresent());
+ assertEquals(1234, config.getOptionalValue("int.key", Long.class).get().intValue());
+ assertTrue(config.getValue("int.key", OptionalLong.class).isPresent());
+ assertEquals(1234, config.getValue("int.key", OptionalLong.class).getAsLong());
+ assertFalse(config.getValue("int.missing.key", OptionalLong.class).isPresent());
+ assertFalse(config.getValue("empty.key", OptionalLong.class).isPresent());
+ assertFalse(config.getOptionalValue("int.missing.key", Long.class).isPresent());
+ assertFalse(config.getOptionalValue("empty.key", Long.class).isPresent());
+
+ try {
+ config.getValue("empty.key", Long.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+ try {
+ config.getValue("int.missing.key", Long.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+
+ assertTrue(config.getOptionalValue("int.key", Double.class).isPresent());
+ assertEquals(1234, config.getOptionalValue("int.key", Double.class).get().intValue());
+ assertTrue(config.getValue("int.key", OptionalDouble.class).isPresent());
+ assertEquals(1234, config.getValue("int.key", OptionalDouble.class).getAsDouble(), 0.0);
+ assertFalse(config.getValue("int.missing.key", OptionalDouble.class).isPresent());
+ assertFalse(config.getValue("empty.key", OptionalDouble.class).isPresent());
+ assertFalse(config.getOptionalValue("int.missing.key", Double.class).isPresent());
+ assertFalse(config.getOptionalValue("empty.key", Double.class).isPresent());
+
+ try {
+ config.getValue("empty.key", Double.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+ try {
+ config.getValue("int.missing.key", Double.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+
+ assertTrue(config.getOptionalValue("int.key", Float.class).isPresent());
+ assertEquals(1234, config.getOptionalValue("int.key", Float.class).get().intValue());
+
+ try {
+ config.getValue("empty.key", Float.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+ try {
+ config.getValue("int.missing.key", Float.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+
+ assertTrue(config.getOptionalValue("boolean.key", Boolean.class).isPresent());
+ assertTrue(config.getValue("boolean.key", Boolean.class).booleanValue());
+ assertFalse(config.getOptionalValue("boolean.missing.key", Boolean.class).isPresent());
+ assertFalse(config.getOptionalValue("empty.key", Boolean.class).isPresent());
+ try {
+ config.getValue("empty.key", Boolean.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+ try {
+ config.getValue("boolean.missing.key", Boolean.class);
+ fail("Expected exception");
+ } catch (NoSuchElementException expected) {
+ }
+
+ assertFalse(config.getOptionalValue("empty.key", String.class).isPresent());
+
+ assertFalse(config.getOptionalValue("empty.key", Character.class).isPresent());
+ }
+
@SafeVarargs
private static <T> T[] array(T... items) {
return items; | ['implementation/src/test/java/io/smallrye/config/ConvertersTestCase.java', 'implementation/src/main/java/io/smallrye/config/Converters.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 132,073 | 26,072 | 3,744 | 39 | 1,003 | 212 | 12 | 1 | 347 | 42 | 81 | 3 | 0 | 0 | 1970-01-01T00:26:13 | 127 | Java | {'Java': 1275680, 'Shell': 1439, 'Kotlin': 1341} | Apache License 2.0 |
494 | gluufederation/oxtrust/550/545 | gluufederation | oxtrust | https://github.com/GluuFederation/oxTrust/issues/545 | https://github.com/GluuFederation/oxTrust/pull/550 | https://github.com/GluuFederation/oxTrust/pull/550#issuecomment-303082148 | 1 | resolve | Update Grant Types | We need to add the following grant types:
- password
- client_credentials
- urn:ietf:params:oauth:grant-type:uma-ticket | 5f788cd2337e0b3ed75b0c9007d8a0ce3d2652f8 | 2caf57076da501050745b60dbbacf514c2011d07 | https://github.com/gluufederation/oxtrust/compare/5f788cd2337e0b3ed75b0c9007d8a0ce3d2652f8...2caf57076da501050745b60dbbacf514c2011d07 | diff --git a/server/src/main/java/org/gluu/oxtrust/action/UpdateClientAction.java b/server/src/main/java/org/gluu/oxtrust/action/UpdateClientAction.java
index 878d46cd1..ebb513eef 100644
--- a/server/src/main/java/org/gluu/oxtrust/action/UpdateClientAction.java
+++ b/server/src/main/java/org/gluu/oxtrust/action/UpdateClientAction.java
@@ -790,7 +790,10 @@ public class UpdateClientAction implements Serializable {
tmpAvailableGrantTypes.add(new SelectableEntity<GrantType>(GrantType.AUTHORIZATION_CODE));
tmpAvailableGrantTypes.add(new SelectableEntity<GrantType>(GrantType.IMPLICIT));
tmpAvailableGrantTypes.add(new SelectableEntity<GrantType>(GrantType.REFRESH_TOKEN));
-
+ tmpAvailableGrantTypes.add(new SelectableEntity<GrantType>(GrantType.CLIENT_CREDENTIALS));
+ tmpAvailableGrantTypes.add(new SelectableEntity<GrantType>(GrantType.RESOURCE_OWNER_PASSWORD_CREDENTIALS));
+ tmpAvailableGrantTypes.add(new SelectableEntity<GrantType>(GrantType.OXAUTH_UMA_TICKET));
+
this.availableGrantTypes = tmpAvailableGrantTypes;
selectAddedGrantTypes();
} | ['server/src/main/java/org/gluu/oxtrust/action/UpdateClientAction.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 2,492,901 | 573,731 | 81,163 | 504 | 328 | 73 | 5 | 1 | 123 | 14 | 30 | 4 | 0 | 0 | 1970-01-01T00:24:54 | 127 | Java | {'Java': 2357692, 'HTML': 870626, 'JavaScript': 658438, 'CSS': 538961, 'SCSS': 28392, 'Handlebars': 5806, 'Python': 3149, 'Ruby': 1078} | MIT License |
8,919 | rundeck/rundeck-cli/459/458 | rundeck | rundeck-cli | https://github.com/rundeck/rundeck-cli/issues/458 | https://github.com/rundeck/rundeck-cli/pull/459 | https://github.com/rundeck/rundeck-cli/pull/459 | 1 | fix | RUN-1002: rd projects scm perform project-commit issue | I seem to be unable to perform a project commit for multiple jobs using the cli.
Fail for multiple jobs that are surrounded by quotes:
```
rd projects scm perform --project=MyProject --integration="export" --field="message='Daily sync'" --action="project-commit" \\
--job="221533ab-6d50-4345-adda-1c2a5408ffae 15a8b7f4-3bc3-4783-b1b4-eebf6679269e"
Action project-commit failed
No jobs were selected
```
Pass for single job surrounded by quotes:
```
rd projects scm perform --project=MyProject --integration="export" --field="message='Daily sync'" --action="project-commit" \\
--job="15a8b7f4-3bc3-4783-b1b4-eebf6679269e"
# Action project-commit was successful.
# Result: SCM export Action was Successful: project-commit
```
Fail for multiple jobs without quotes:
```
rd projects scm perform --project=MyProject --integration="export" --field="message='Daily sync'" --action="project-commit" \\
--job=221533ab-6d50-4345-adda-1c2a5408ffae 15a8b7f4-3bc3-4783-b1b4-eebf6679269e
Unmatched argument at index 8: '15a8b7f4-3bc3-4783-b1b4-eebf6679269e'
```
According to the cli/docs, I can pass in space separate job ids.
```
$ rd --version
2.0.0
```
Rundeck version 4.3.1 | 2b4d3d44a52847227e774ed09ca285d0414d378a | 6240fecb9b889846357d7888d80e068eb66411af | https://github.com/rundeck/rundeck-cli/compare/2b4d3d44a52847227e774ed09ca285d0414d378a...6240fecb9b889846357d7888d80e068eb66411af | diff --git a/rd-cli-tool/src/main/java/org/rundeck/client/tool/commands/projects/SCM.java b/rd-cli-tool/src/main/java/org/rundeck/client/tool/commands/projects/SCM.java
index 8648aff..0bc4eac 100644
--- a/rd-cli-tool/src/main/java/org/rundeck/client/tool/commands/projects/SCM.java
+++ b/rd-cli-tool/src/main/java/org/rundeck/client/tool/commands/projects/SCM.java
@@ -284,10 +284,10 @@ public class SCM extends BaseCommand {
@Getter @Setter
public static class ActionPerformOptions extends ActionInputsOptions {
- @CommandLine.Option(names = {"--field", "-f"}, description = "Field input values, space separated key=value list")
+ @CommandLine.Option(names = {"--field", "-f"}, arity = "1..*", description = "Field input values, space separated key=value list")
private List<String> fields;
- @CommandLine.Option(names = {"--item", "-I"}, description = "Items to include, space separated list")
+ @CommandLine.Option(names = {"--item", "-I"}, arity = "1..*", description = "Items to include, space separated list")
private List<String> item;
@CommandLine.Option(names = {"-A", "--allitems"},
@@ -314,11 +314,12 @@ public class SCM extends BaseCommand {
"(import only)")
boolean allUntrackedItems;
- @CommandLine.Option(names = {"--job", "-j"}, description = "Job IDs to include, space separated list")
+ @CommandLine.Option(names = {"--job", "-j"}, arity = "1..*", description = "Job IDs to include, space separated list")
List<String> job;
@CommandLine.Option(names = {"--delete", "-d"},
+ arity = "1..*",
description = "Job IDs or Item Ids to delete, space separated list")
List<String> delete;
| ['rd-cli-tool/src/main/java/org/rundeck/client/tool/commands/projects/SCM.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 662,402 | 137,537 | 20,265 | 196 | 774 | 176 | 7 | 1 | 1,212 | 126 | 379 | 37 | 0 | 4 | 1970-01-01T00:27:35 | 123 | Java | {'Java': 657161, 'Groovy': 193234, 'Shell': 3366, 'Dockerfile': 2799} | Apache License 2.0 |
1,379 | hapifhir/org.hl7.fhir.core/885/884 | hapifhir | org.hl7.fhir.core | https://github.com/hapifhir/org.hl7.fhir.core/issues/884 | https://github.com/hapifhir/org.hl7.fhir.core/pull/885 | https://github.com/hapifhir/org.hl7.fhir.core/pull/885 | 1 | fixes | "5.0.0-snapshot2" is missing from enumerations class | The new version: "5.0.0-snapshot2" was introduced ([In Constants](https://github.com/hapifhir/org.hl7.fhir.core/blob/master/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/model/Constants.java)), but the version wasn't added to [Enumerations](https://github.com/hapifhir/org.hl7.fhir.core/blob/master/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/model/Enumerations.java) | f3fa8cae2452336c06775de90eb8dd2d0a225c94 | 1441141d95c8a89da068ac3463648c6687078d3e | https://github.com/hapifhir/org.hl7.fhir.core/compare/f3fa8cae2452336c06775de90eb8dd2d0a225c94...1441141d95c8a89da068ac3463648c6687078d3e | diff --git a/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/model/Enumerations.java b/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/model/Enumerations.java
index 163e96d32..7c205f623 100644
--- a/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/model/Enumerations.java
+++ b/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/model/Enumerations.java
@@ -3490,8 +3490,8 @@ public class Enumerations {
*/
MEDICATIONREQUEST,
/**
- * A record of a medication that is being consumed by a patient. A MedicationUsage may indicate that the patient may be taking the medication now or has taken the medication in the past or will be taking the medication in the future. The source of this information can be the patient, significant other (such as a family member or spouse), or a clinician. A common scenario where this information is captured is during the history taking process during a patient visit or stay. The medication information may come from sources such as the patient's memory, from a prescription bottle, or from a list of medications the patient, clinician or other party maintains.
-
+ * A record of a medication that is being consumed by a patient. A MedicationUsage may indicate that the patient may be taking the medication now or has taken the medication in the past or will be taking the medication in the future. The source of this information can be the patient, significant other (such as a family member or spouse), or a clinician. A common scenario where this information is captured is during the history taking process during a patient visit or stay. The medication information may come from sources such as the patient's memory, from a prescription bottle, or from a list of medications the patient, clinician or other party maintains.
+
The primary difference between a medicationusage and a medicationadministration is that the medication administration has complete administration information and is based on actual administration information from the person who administered the medication. A medicationusage is often, if not always, less specific. There is no required date/time when the medication was administered, in fact we only know that a source has reported the patient is taking this medication, where details such as time, quantity, or rate or even medication product may be incomplete or missing or less precise. As stated earlier, the Medication Usage information may come from the patient's memory, from a prescription bottle or from a list of medications the patient, clinician or other party maintains. Medication administration is more formal and is not missing detailed information.
*/
MEDICATIONUSAGE,
@@ -6699,8 +6699,12 @@ The primary difference between a medicationusage and a medicationadministration
/**
* R5 Snapshot1
*/
- _5_0_0SNAPSHOT1,
+ _5_0_0SNAPSHOT1,
/**
+ * R5 Snapshot2
+ */
+ _5_0_0SNAPSHOT2,
+ /**
* added to help the parsers
*/
NULL;
@@ -6813,6 +6817,8 @@ The primary difference between a medicationusage and a medicationadministration
return _5_0_0CIBUILD;
if ("5.0.0-snapshot1".equals(codeString))
return _5_0_0SNAPSHOT1;
+ if ("5.0.0-snapshot2".equals(codeString))
+ return _5_0_0SNAPSHOT2;
throw new FHIRException("Unknown FHIRVersion code '"+codeString+"'");
}
public String toCode() {
@@ -6870,6 +6876,7 @@ The primary difference between a medicationusage and a medicationadministration
case _5_0_0: return "5.0.0";
case _5_0_0CIBUILD: return "5.0.0-cibuild";
case _5_0_0SNAPSHOT1: return "5.0.0-snapshot1";
+ case _5_0_0SNAPSHOT2: return "5.0.0-snapshot2";
case NULL: return null;
default: return "?";
}
@@ -6929,6 +6936,7 @@ The primary difference between a medicationusage and a medicationadministration
case _5_0_0: return "http://hl7.org/fhir/FHIR-version";
case _5_0_0CIBUILD: return "http://hl7.org/fhir/FHIR-version";
case _5_0_0SNAPSHOT1: return "http://hl7.org/fhir/FHIR-version";
+ case _5_0_0SNAPSHOT2: return "http://hl7.org/fhir/FHIR-version";
case NULL: return null;
default: return "?";
}
@@ -6988,6 +6996,7 @@ The primary difference between a medicationusage and a medicationadministration
case _5_0_0: return "R5";
case _5_0_0CIBUILD: return "R5 CIBuild";
case _5_0_0SNAPSHOT1: return "R5 Snapshot1";
+ case _5_0_0SNAPSHOT2: return "R5 Snapshot2";
case NULL: return null;
default: return "?";
}
@@ -7047,6 +7056,7 @@ The primary difference between a medicationusage and a medicationadministration
case _5_0_0: return "5.0.0";
case _5_0_0CIBUILD: return "5.0.0-cibuild";
case _5_0_0SNAPSHOT1: return "5.0.0-snapshot1";
+ case _5_0_0SNAPSHOT2: return "5.0.0-snapshot2";
case NULL: return null;
default: return "?";
}
@@ -7064,7 +7074,7 @@ The primary difference between a medicationusage and a medicationadministration
public static boolean isValidCode(String codeString) {
return Utilities.existsInList(codeString, "0.01", "0.05", "0.06", "0.11", "0.0.80", "0.0.81" ,"0.0.82", "0.4.0", "0.5.0",
"1.0.0", "1.0.1", "1.0.2", "1.1.0", "1.4.0", "1.6.0", "1.8.0", "3.0.0", "3.0.1", "3.0.2", "3.3.0", "3.5.0",
- "4.0.0", "4.0.1", "4.1.0" ,"4.2.0" ,"4.3.0-snapshot1" ,"4.3.0-cibuild" ,"4.3.0", "5.0.0", "5.0.0-cibuild", "5.0.0-snapshot1");
+ "4.0.0", "4.0.1", "4.1.0" ,"4.2.0" ,"4.3.0-snapshot1" ,"4.3.0-cibuild" ,"4.3.0", "5.0.0", "5.0.0-cibuild", "5.0.0-snapshot1", "5.0.0-snapshot2");
}
@@ -7192,6 +7202,8 @@ The primary difference between a medicationusage and a medicationadministration
return FHIRVersion._5_0_0CIBUILD;
if ("5.0.0-snapshot1".equals(codeString))
return FHIRVersion._5_0_0SNAPSHOT1;
+ if ("5.0.0-snapshot2".equals(codeString))
+ return FHIRVersion._5_0_0SNAPSHOT2;
throw new IllegalArgumentException("Unknown FHIRVersion code '"+codeString+"'");
}
public Enumeration<FHIRVersion> fromType(Base code) throws FHIRException {
@@ -7308,6 +7320,8 @@ The primary difference between a medicationusage and a medicationadministration
return new Enumeration<FHIRVersion>(this, FHIRVersion._5_0_0CIBUILD);
if ("5.0.0-snapshot1".equals(codeString))
return new Enumeration<FHIRVersion>(this, FHIRVersion._5_0_0SNAPSHOT1);
+ if ("5.0.0-snapshot2".equals(codeString))
+ return new Enumeration<FHIRVersion>(this, FHIRVersion._5_0_0SNAPSHOT2);
throw new FHIRException("Unknown FHIRVersion code '"+codeString+"'");
}
public String toCode(FHIRVersion code) {
@@ -7417,6 +7431,8 @@ The primary difference between a medicationusage and a medicationadministration
return "5.0.0-cibuild";
if (code == FHIRVersion._5_0_0SNAPSHOT1)
return "5.0.0-snapshot1";
+ if (code == FHIRVersion._5_0_0SNAPSHOT2)
+ return "5.0.0-snapshot2";
return "?";
}
public String toSystem(FHIRVersion code) {
@@ -9913,8 +9929,8 @@ The primary difference between a medicationusage and a medicationadministration
*/
MEDICATIONREQUEST,
/**
- * A record of a medication that is being consumed by a patient. A MedicationUsage may indicate that the patient may be taking the medication now or has taken the medication in the past or will be taking the medication in the future. The source of this information can be the patient, significant other (such as a family member or spouse), or a clinician. A common scenario where this information is captured is during the history taking process during a patient visit or stay. The medication information may come from sources such as the patient's memory, from a prescription bottle, or from a list of medications the patient, clinician or other party maintains.
-
+ * A record of a medication that is being consumed by a patient. A MedicationUsage may indicate that the patient may be taking the medication now or has taken the medication in the past or will be taking the medication in the future. The source of this information can be the patient, significant other (such as a family member or spouse), or a clinician. A common scenario where this information is captured is during the history taking process during a patient visit or stay. The medication information may come from sources such as the patient's memory, from a prescription bottle, or from a list of medications the patient, clinician or other party maintains.
+
The primary difference between a medicationusage and a medicationadministration is that the medication administration has complete administration information and is based on actual administration information from the person who administered the medication. A medicationusage is often, if not always, less specific. There is no required date/time when the medication was administered, in fact we only know that a source has reported the patient is taking this medication, where details such as time, quantity, or rate or even medication product may be incomplete or missing or less precise. As stated earlier, the Medication Usage information may come from the patient's memory, from a prescription bottle or from a list of medications the patient, clinician or other party maintains. Medication administration is more formal and is not missing detailed information.
*/
MEDICATIONUSAGE,
diff --git a/org.hl7.fhir.r5/src/test/java/org/hl7/fhir/r5/test/ResourceTests.java b/org.hl7.fhir.r5/src/test/java/org/hl7/fhir/r5/test/ResourceTests.java
index f4d97151a..e29bd0ed7 100644
--- a/org.hl7.fhir.r5/src/test/java/org/hl7/fhir/r5/test/ResourceTests.java
+++ b/org.hl7.fhir.r5/src/test/java/org/hl7/fhir/r5/test/ResourceTests.java
@@ -12,6 +12,7 @@ import org.hl7.fhir.r5.model.CapabilityStatement;
import org.hl7.fhir.r5.model.CodeSystem;
import org.hl7.fhir.r5.model.CompartmentDefinition;
import org.hl7.fhir.r5.model.ConceptMap;
+import org.hl7.fhir.r5.model.Constants;
import org.hl7.fhir.r5.model.ExampleScenario;
import org.hl7.fhir.r5.model.GraphDefinition;
import org.hl7.fhir.r5.model.ImplementationGuide;
@@ -70,4 +71,11 @@ class ResourceTests {
String output = xml.composeString(res);
assertEquals(TGT, output);
}
+
+ @Test
+ void testCapabilityStatementFhirVersion() {
+ CapabilityStatement cap = new CapabilityStatement();
+ cap.getFhirVersionElement().setValueAsString(Constants.VERSION);
+ assertEquals(Constants.VERSION, cap.getFhirVersion().getDisplay());
+ }
}
| ['org.hl7.fhir.r5/src/test/java/org/hl7/fhir/r5/test/ResourceTests.java', 'org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/model/Enumerations.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 206,367,491 | 45,406,310 | 4,414,018 | 6,919 | 3,852 | 1,009 | 28 | 1 | 369 | 15 | 106 | 1 | 2 | 0 | 1970-01-01T00:27:38 | 123 | Java | {'Java': 225424124, 'Batchfile': 790} | Apache License 2.0 |
1,378 | hapifhir/org.hl7.fhir.core/968/967 | hapifhir | org.hl7.fhir.core | https://github.com/hapifhir/org.hl7.fhir.core/issues/967 | https://github.com/hapifhir/org.hl7.fhir.core/pull/968 | https://github.com/hapifhir/org.hl7.fhir.core/pull/968 | 1 | fixes | maxValue contraint on decimal values is ignored and doesn't produce errors | ### Problem description
It seems, that the validator doesn't evaluate the maxValue constraint for decimal data types in profiles (**maxValueDecimal**). If the constraint is violated, no errors are produced. The following minimal example ([MedicationRequestWithDispenseRequestQuantityLimit.zip](https://github.com/hapifhir/org.hl7.fhir.core/files/9888003/MedicationRequestWithDispenseRequestQuantityLimit.zip)) demonstrates the behavior (take notice of the profile constraint maxValueDecimal = 999 and the actual value of 1000):
#### Profile
`{
"resourceType": "StructureDefinition",
"id": "MedicationRequestWithDispenseRequestQuantityLimit",
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/structuredefinition-category",
"valueString": "Clinical.Medications"
},
{
"url": "http://hl7.org/fhir/StructureDefinition/structuredefinition-security-category",
"valueCode": "patient"
}
],
"url": "http://example.org/StructureDefinition/MedicationRequestWithDispenseRequestQuantityLimit",
"version": "1.0.0",
"name": "MedicationRequestWithDispenseRequestQuantityLimit",
"title": "Example of MedicationRequest profile for demonstration of HL7 Java Validator Bug with maxValueDecimal-Evaluation",
"status": "active",
"description": "Validator doesn't throw any errors for instances with MedicationRequest.dispenseRequest.quantity.value > specified maxValueDecimal",
"fhirVersion": "4.0.1",
"mapping": [
{
"identity": "workflow",
"uri": "http://hl7.org/fhir/workflow",
"name": "Workflow Pattern"
},
{
"identity": "script10.6",
"uri": "http://ncpdp.org/SCRIPT10_6",
"name": "Mapping to NCPDP SCRIPT 10.6"
},
{
"identity": "rim",
"uri": "http://hl7.org/v3",
"name": "RIM Mapping"
},
{
"identity": "w5",
"uri": "http://hl7.org/fhir/fivews",
"name": "FiveWs Pattern Mapping"
},
{
"identity": "v2",
"uri": "http://hl7.org/v2",
"name": "HL7 v2 Mapping"
}
],
"kind": "resource",
"abstract": false,
"type": "MedicationRequest",
"baseDefinition": "http://hl7.org/fhir/StructureDefinition/MedicationRequest",
"derivation": "constraint",
"differential": {
"element": [
{
"id": "MedicationRequest.dispenseRequest",
"path": "MedicationRequest.dispenseRequest",
"min": 1,
"mustSupport": true
},
{
"id": "MedicationRequest.dispenseRequest.quantity",
"path": "MedicationRequest.dispenseRequest.quantity",
"min": 1,
"mustSupport": true
},
{
"id": "MedicationRequest.dispenseRequest.quantity.value",
"path": "MedicationRequest.dispenseRequest.quantity.value",
"min": 1,
"maxValueDecimal": 999,
"mustSupport": true
}
]
}
}`
#### Instance
`<MedicationRequest xmlns="http://hl7.org/fhir">
<id value="medrx0304" />
<meta>
<profile value="http://example.org/StructureDefinition/MedicationRequestWithDispenseRequestQuantityLimit" />
</meta>
<contained>
<Medication>
<id value="med0312" />
<code>
<coding>
<system value="http://snomed.info/sct" />
<code value="324689003" />
<display value="Nystatin 100000 unit/mL oral suspension" />
</coding>
</code>
</Medication>
</contained>
<status value="completed" />
<intent value="order" />
<medicationReference>
<reference value="#med0312" />
<display value="Nystatin 100,000 u/ml oral suspension" />
</medicationReference>
<subject>
<reference value="http://example.org/Patient/pat1" />
<display value="Donald Duck" />
</subject>
<authoredOn value="2015-01-15" />
<dispenseRequest>
<quantity>
<value value="1000" />
</quantity>
</dispenseRequest>
</MedicationRequest>`
#### Call to validator
`java -jar .\\validator_cli.jar .\\MedicationRequest.xml -ig .\\StructureDefinition-MedicationRequestWithDispenseRequestQuantityLimit.json -profile http://example.org/StructureDefinition/MedicationRequestWithDispenseRequestQuantityLimit`
#### Output
> FHIR Validation tool Version 5.6.74 (Git# 325fd0736d48). Built 2022-10-25T09:35:43.821Z (3 days old)
> Java: 11.0.14.1 from C:\\Program Files\\OpenJDK\\openjdk-11.0.14.1_1 on amd64 (64bit). 8136MB available
>
> WARNING: Default file encoding is windows-1252 which may cause unexpected results.
> To fix this issue, run this program with the parameter '-Dfile.encoding=UTF-8'
> Future releases may not be able to run at all with encoding windows-1252
>
> Paths: Current = C:\\Dev\\fhir validator, Package Cache = C:\\Users\\...\\.fhir\\packages
> Params: .\\MedicationRequest.xml -ig .\\StructureDefinition-MedicationRequestWithDispenseRequestQuantityLimit.json -profile http://example.org/StructureDefinition/MedicationRequestWithDispenseRequestQuantityLimit
> Scanning for versions (no -version parameter):
> fhirVersion in .\\StructureDefinition-MedicationRequestWithDispenseRequestQuantityLimit.json: 4.0
> -> use version 4.0
> Jurisdiction: No Jurisdiction
> Loading
> Load FHIR v4.0 from hl7.fhir.r4.core#4.0.1 Load hl7.terminology.r4#4.0.0 - 4164 resources (00:04.884)
> - 7383 resources (00:00.000)
> Load hl7.terminology#4.0.0 - 4164 resources (00:00.724)
> Load R5 Extensions - 123 resources (00:06.213)
> Terminology server http://tx.fhir.org - Version 2.0.14 (00:01.082)
> Load .\\StructureDefinition-MedicationRequestWithDispenseRequestQuantityLimit.json - 1 resources (00:00.002)
> Get set... go (00:00.014)
> Validating
> Profiles: [http://example.org/StructureDefinition/MedicationRequestWithDispenseRequestQuantityLimit]
> Validate .\\MedicationRequest.xml
> Validate MedicationRequest against http://example.org/StructureDefinition/MedicationRequestWithDispenseRequestQuantityLimit..........20..........40..........60..........80.........|
> 00:00.405
> Done. Times: Loading: 00:13.215, validation: 00:00.405. Memory = 526Mb
>
> Success: 0 errors, 0 warnings, 1 notes
> Information: All OK
#### Expected output
> *FAILURE*: 1 errors, 0 warnings, 0 notes
> Error @ MedicationRequest.dispenseRequest.quantity.value (line 31, col35): value is greater than permitted maximum value of DecimalType[999]
### Possible fix
The error seems to locate in InstanceValidator:2415 and InstanceValidator:2416 which assume an integer datatype instead of a decimal one. A fix can be:
`ok = rule(errors, IssueType.INVALID, e.line(), e.col(), path, !context.hasMaxValueDecimalType() ||
!context.getMaxValueDecimalType().hasValue() || checkDecimalMaxValue(v, context.getMaxValueDecimalType().getValue()), I18nConstants.TYPE_SPECIFIC_CHECKS_DT_DECIMAL_GT, (context.hasMaxValueDecimalType() ? context.getMaxValueDecimalType() : "")) && ok;`
| f0c53bd7a9968a410671e706ab322551485fbdd0 | 413f3a35430f83b8cd3e93b45a74efd792c9e369 | https://github.com/hapifhir/org.hl7.fhir.core/compare/f0c53bd7a9968a410671e706ab322551485fbdd0...413f3a35430f83b8cd3e93b45a74efd792c9e369 | diff --git a/org.hl7.fhir.validation/src/main/java/org/hl7/fhir/validation/instance/InstanceValidator.java b/org.hl7.fhir.validation/src/main/java/org/hl7/fhir/validation/instance/InstanceValidator.java
index b0f084daa..069635dcc 100644
--- a/org.hl7.fhir.validation/src/main/java/org/hl7/fhir/validation/instance/InstanceValidator.java
+++ b/org.hl7.fhir.validation/src/main/java/org/hl7/fhir/validation/instance/InstanceValidator.java
@@ -2412,8 +2412,8 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
warning(errors, IssueType.VALUE, e.line(), e.col(), path, ds != DecimalStatus.RANGE, I18nConstants.TYPE_SPECIFIC_CHECKS_DT_DECIMAL_RANGE, e.primitiveValue());
try {
Decimal v = new Decimal(e.getValue());
- ok = rule(errors, IssueType.INVALID, e.line(), e.col(), path, !context.hasMaxValueIntegerType() ||
- !context.getMaxValueIntegerType().hasValue() || checkDecimalMaxValue(v, context.getMaxValueDecimalType().getValue()), I18nConstants.TYPE_SPECIFIC_CHECKS_DT_DECIMAL_GT, (context.hasMaxValueIntegerType() ? context.getMaxValueIntegerType() : "")) && ok;
+ ok = rule(errors, IssueType.INVALID, e.line(), e.col(), path, !context.hasMaxValueDecimalType() ||
+ !context.getMaxValueDecimalType().hasValue() || checkDecimalMaxValue(v, context.getMaxValueDecimalType().getValue()), I18nConstants.TYPE_SPECIFIC_CHECKS_DT_DECIMAL_GT, (context.hasMaxValueDecimalType() ? context.getMaxValueDecimalType() : "")) && ok;
ok = rule(errors, IssueType.INVALID, e.line(), e.col(), path, !context.hasMinValueIntegerType() ||
!context.getMinValueIntegerType().hasValue() || checkDecimalMinValue(v, context.getMaxValueDecimalType().getValue()), I18nConstants.TYPE_SPECIFIC_CHECKS_DT_DECIMAL_LT, (context.hasMinValueIntegerType() ? context.getMinValueIntegerType() : "")) && ok;
} catch (Exception ex) { | ['org.hl7.fhir.validation/src/main/java/org/hl7/fhir/validation/instance/InstanceValidator.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 211,817,684 | 46,676,623 | 4,513,967 | 7,132 | 760 | 178 | 4 | 1 | 7,161 | 569 | 1,870 | 164 | 19 | 0 | 1970-01-01T00:27:46 | 123 | Java | {'Java': 225424124, 'Batchfile': 790} | Apache License 2.0 |
1,377 | hapifhir/org.hl7.fhir.core/1041/1040 | hapifhir | org.hl7.fhir.core | https://github.com/hapifhir/org.hl7.fhir.core/issues/1040 | https://github.com/hapifhir/org.hl7.fhir.core/pull/1041 | https://github.com/hapifhir/org.hl7.fhir.core/pull/1041 | 1 | fixes | NullPointerException while comparing profiles | ### Problem Description
The validator throws an exception while comparing profiles, which constraint meta.profile to some fixed values.
### Steps to reproduce
Call the CLI version of validator as follows:
`java -jar validator_cli.jar -compare -dest "c:\\Dev\\fhir validator\\compare_results" -version 4.0.1 -ig kbv.ita.for#1.1.0 ig kbv.basis#1.3.0 -left "https://fhir.kbv.de/StructureDefinition/KBV_PR_FOR_Patient" -right "https://fhir.kbv.de/StructureDefinition/KBV_PR_Base_Patient"`
Output:
FHIR Validation tool Version 5.6.87 (Git# 6d8cd59f1ee0). Built 2022-12-14T03:25:37.265Z (60 hours old)
Java: 11.0.14.1 from C:\\Program Files\\OpenJDK\\openjdk-11.0.14.1_1 on amd64 (64bit). 8136MB available
WARNING: Default file encoding is windows-1252 which may cause unexpected results.
To fix this issue, run this program with the parameter '-Dfile.encoding=UTF-8'
Future releases may not be able to run at all with encoding windows-1252
Valid destination directory provided: "c:\\Dev\\fhir validator\\compare_results")
Paths: Current = c:\\Dev\\fhir validator, Package Cache = C:\\Users\\...\\.fhir\\packages
Params: -compare -dest "c:\\Dev\\fhir validator\\compare_results" -version 4.0.1 -ig kbv.ita.for#1.1.0 ig kbv.basis#1.3.0 -left https://fhir.kbv.de/StructureDefinition/KBV_PR_FOR_Patient -right https://fhir.kbv.de/StructureDefinition/KBV_PR_Base_Patient
Load FHIR v4.0 from hl7.fhir.r4.core#4.0.1.: - 4576 resources (00:05.145)
Load hl7.terminology#4.0.0 - 4164 resources (00:02.725)
Load R5 Extensions - 127 resources (00:02.379)
Terminology server http://tx.fhir.org - Version 2.0.14 (00:01.121)
Load de.basisprofil.r4#1.3.2 - 177 resources (00:00.219)
Load KBV.Basis#1.3.0 - 146 resources (00:01.981)
Load kbv.ita.for#1.1.0 - 20 resources (00:00.099)
Package Summary: [hl7.fhir.r4.core#4.0.1, hl7.fhir.xver-extensions#0.0.11, hl7.terminology#4.0.0, de.basisprofil.r4#1.3.2, KBV.Basis#1.3.0, kbv.ita.for#1.1.0]
Get set... go (00:01.393)
Load hl7.fhir.pubpack#0.1.4 - 0 resources (00:00.718)
Comparing StructureDefinitions https://fhir.kbv.de/StructureDefinition/KBV_PR_FOR_Patient to https://fhir.kbv.de/StructureDefinition/KBV_PR_Base_Patient
Generating output to c:\\Dev\\fhir validator\\compare_results...
Exception rendering https://fhir.kbv.de/StructureDefinition/KBV_PR_FOR_Patient|1.1.0||https://fhir.kbv.de/StructureDefinition/KBV_PR_Base_Patient|1.3.0: null
java.lang.NullPointerException
at org.hl7.fhir.r5.conformance.ProfileUtilities.generateDescription(ProfileUtilities.java:4151)
at org.hl7.fhir.r5.conformance.ProfileUtilities.generateDescription(ProfileUtilities.java:3882)
at org.hl7.fhir.r5.conformance.ProfileUtilities.genElementCells(ProfileUtilities.java:3575)
at org.hl7.fhir.r5.comparison.ProfileComparer.genElementComp(ProfileComparer.java:1002)
at org.hl7.fhir.r5.comparison.ProfileComparer.genElementComp(ProfileComparer.java:1014)
at org.hl7.fhir.r5.comparison.ProfileComparer.genElementComp(ProfileComparer.java:1014)
at org.hl7.fhir.r5.comparison.ProfileComparer.renderStructure(ProfileComparer.java:937)
at org.hl7.fhir.r5.comparison.ComparisonRenderer.renderProfile(ComparisonRenderer.java:224)
at org.hl7.fhir.r5.comparison.ComparisonRenderer.renderComparison(ComparisonRenderer.java:144)
at org.hl7.fhir.r5.comparison.ComparisonRenderer.processList(ComparisonRenderer.java:111)
at org.hl7.fhir.r5.comparison.ComparisonRenderer.render(ComparisonRenderer.java:87)
at org.hl7.fhir.validation.cli.services.ComparisonService.compareStructureDefinitions(ComparisonService.java:74)
at org.hl7.fhir.validation.cli.services.ComparisonService.doLeftRightComparison(ComparisonService.java:37)
at org.hl7.fhir.validation.ValidatorCli.doLeftRightComparison(ValidatorCli.java:265)
at org.hl7.fhir.validation.ValidatorCli.main(ValidatorCli.java:131)
Done
Details of comparison in file:///C:/Dev/fhir%20validator/compare_results/sd-KBV-PR-FOR-Patient-KBV-PR-Base-Patient.html do not exist
### Expected output
```
FHIR Validation tool Version 5.6.89-SNAPSHOT (Git# aaa5c9d6a409). Built 2022-12-16T16:31:11.232+01:00 (8 mins old)
Java: 11.0.14.1 from C:\\Program Files\\OpenJDK\\openjdk-11.0.14.1_1 on amd64 (64bit). 8136MB available
WARNING: Default file encoding is windows-1252 which may cause unexpected results.
To fix this issue, run this program with the parameter '-Dfile.encoding=UTF-8'
Future releases may not be able to run at all with encoding windows-1252
Valid destination directory provided: "c:\\Dev\\fhir validator\\compare_results")
Paths: Current = c:\\Dev\\fhir validator, Package Cache = C:\\Users\\...\\.fhir\\packages
Params: -compare -dest "c:\\Dev\\fhir validator\\compare_results" -version 4.0.1 -ig kbv.ita.for#1.1.0 ig kbv.basis#1.1.3 -left https://fhir.kbv.de/StructureDefinition/KBV_PR_FOR_Patient -right https://fhir.kbv.de/StructureDefinition/KBV_PR_Base_Patient
Load FHIR v4.0 from hl7.fhir.r4.core#4.0.1.: - 4576 resources (00:04.293)
Load hl7.terminology#4.0.0 - 4164 resources (00:01.632)
Load R5 Extensions - 127 resources (00:02.129)
Terminology server http://tx.fhir.org - Version 2.0.14 (00:01.177)
Load de.basisprofil.r4#1.3.2 - 177 resources (00:00.211)
Load KBV.Basis#1.3.0 - 146 resources (00:01.896)
Load kbv.ita.for#1.1.0 - 20 resources (00:00.092)
Package Summary: [hl7.fhir.r4.core#4.0.1, hl7.fhir.xver-extensions#0.0.11, hl7.terminology#4.0.0, de.basisprofil.r4#1.3.2, KBV.Basis#1.3.0, kbv.ita.for#1.1.0]
Get set... go (00:01.515)
Load hl7.fhir.pubpack#0.1.4 - 0 resources (00:00.753)
Comparing StructureDefinitions https://fhir.kbv.de/StructureDefinition/KBV_PR_FOR_Patient to https://fhir.kbv.de/StructureDefinition/KBV_PR_Base_Patient
Generating output to c:\\Dev\\fhir validator\\compare_results...
Done
```
Comparison details at file:///C:/Dev/fhir%20validator/compare_results/sd-KBV-PR-FOR-Patient-KBV-PR-Base-Patient.html should exist | aaa5c9d6a4093ec3152a82a9ccd40be7966993fe | 84508415efc75a8d17924704870c9272965f2b9e | https://github.com/hapifhir/org.hl7.fhir.core/compare/aaa5c9d6a4093ec3152a82a9ccd40be7966993fe...84508415efc75a8d17924704870c9272965f2b9e | diff --git a/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/conformance/ProfileUtilities.java b/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/conformance/ProfileUtilities.java
index 89d0e134a..151a12ad5 100644
--- a/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/conformance/ProfileUtilities.java
+++ b/org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/conformance/ProfileUtilities.java
@@ -4147,7 +4147,7 @@ public class ProfileUtilities extends TranslatingUtilities {
if (!useTableForFixedValues || !allowSubRows || definition.getFixed().isPrimitive()) {
String s = buildJson(definition.getFixed());
String link = null;
- if (Utilities.isAbsoluteUrl(s))
+ if (Utilities.isAbsoluteUrl(s) && pkp != null)
link = pkp.getLinkForUrl(corePath, s);
c.getPieces().add(checkForNoChange(definition.getFixed(), gen.new Piece(link, s, null).addStyle("color: darkgreen")));
} else { | ['org.hl7.fhir.r5/src/main/java/org/hl7/fhir/r5/conformance/ProfileUtilities.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 216,077,932 | 47,531,690 | 4,540,085 | 7,153 | 108 | 24 | 2 | 1 | 6,082 | 428 | 1,848 | 81 | 14 | 1 | 1970-01-01T00:27:51 | 123 | Java | {'Java': 225424124, 'Batchfile': 790} | Apache License 2.0 |
1,376 | hapifhir/org.hl7.fhir.core/1098/1091 | hapifhir | org.hl7.fhir.core | https://github.com/hapifhir/org.hl7.fhir.core/issues/1091 | https://github.com/hapifhir/org.hl7.fhir.core/pull/1098 | https://github.com/hapifhir/org.hl7.fhir.core/pull/1098 | 1 | fixes | Pluralization code returning constant name instead of template for HAPI-FHIR test | The following test in HAPI-FHIR fails with the new pluralization error message:
FhirPathFilterInterceptorTest
It is now returning `org.hl7.fhir.exceptions.PathEngineException: FHIRPATH_LEFT_VALUE` where it previously gave the templated message: `org.hl7.fhir.exceptions.PathEngineException: Error performing *: left operand has more than one value` | 61e63b128a4c10f29bba4e07beb5b2a277ae46ff | f8fa68cc4a193383f43f6e551280e7323dc84e82 | https://github.com/hapifhir/org.hl7.fhir.core/compare/61e63b128a4c10f29bba4e07beb5b2a277ae46ff...f8fa68cc4a193383f43f6e551280e7323dc84e82 | diff --git a/org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/context/IWorkerContext.java b/org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/context/IWorkerContext.java
index 0e420ebf6..db92e864a 100644
--- a/org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/context/IWorkerContext.java
+++ b/org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/context/IWorkerContext.java
@@ -299,6 +299,8 @@ public interface IWorkerContext {
String formatMessage(String theMessage, Object... theMessageArguments);
+ String formatMessagePlural(Integer pl, String theMessage, Object... theMessageArguments);
+
void setValidationMessageLanguage(Locale locale);
public class ValidationResult {
diff --git a/org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/utils/FHIRPathEngine.java b/org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/utils/FHIRPathEngine.java
index 2c865e49d..5e55e6401 100644
--- a/org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/utils/FHIRPathEngine.java
+++ b/org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/utils/FHIRPathEngine.java
@@ -575,7 +575,7 @@ public class FHIRPathEngine {
}
private FHIRException makeExceptionPlural(Integer num, ExpressionNode holder, String constName, Object... args) {
- String fmt = worker.formatMessage(constName, args);
+ String fmt = worker.formatMessagePlural(num, constName, args);
if (location != null) {
fmt = fmt + " "+worker.formatMessage(I18nConstants.FHIRPATH_LOCATION, location);
} | ['org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/context/IWorkerContext.java', 'org.hl7.fhir.r4/src/main/java/org/hl7/fhir/r4/utils/FHIRPathEngine.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 216,433,228 | 47,618,588 | 4,547,229 | 7,177 | 218 | 47 | 4 | 2 | 354 | 37 | 75 | 5 | 0 | 0 | 1970-01-01T00:27:55 | 123 | Java | {'Java': 225424124, 'Batchfile': 790} | Apache License 2.0 |
929 | openliberty/ci.maven/1498/1497 | openliberty | ci.maven | https://github.com/OpenLiberty/ci.maven/issues/1497 | https://github.com/OpenLiberty/ci.maven/pull/1498 | https://github.com/OpenLiberty/ci.maven/pull/1498 | 1 | fixes | FeatureModifiedException occurring often with multi-module project | Using https://github.ibm.com/was-svt/acme-ee
Most attempts to generate features result in two calls to the binary scanner and a FeatureModifiedException.
This especially does not make sense for the initial call to the binary scanner as there are no current features passed to the binary scanner. And on calls made afterwards, the current features were all determined by binary scanner from the initial call.
Below is some debug output from running `mvn liberty:dev` (including changes from https://github.com/OpenLiberty/ci.common/pull/361 - also confirmed this error occurs without these changes)
Initial generate features on startup
```
[DEBUG] Calling binary-app-scanner-22.0.0.2-SNAPSHOT.jar with the following inputs...
binaryInputs: [C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs11_m2\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs11_m1\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs20\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityPojoWSes\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityWeb\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3JAXWS\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3JAXWS_CDI\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3MDB\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3\\target\\classes]
targetJavaEE: ee7
targetMicroP: null
currentFeatures: []
logLocation: C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\target
logLevel: *=FINE
locale: en_CA
[DEBUG] Recalling binary scanner with the following inputs...
binaryInputs: [C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs11_m2\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs11_m1\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs20\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityPojoWSes\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityWeb\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3JAXWS\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3JAXWS_CDI\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3MDB\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3\\target\\classes]
targetJavaEE: ee7
targetMicroP: null
currentFeatures: []
logLocation: C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\target
logLevel: *=FINE
locale: en_CA
[DEBUG] FeatureModifiedException, modifiedSet containsAll userFeatures, pass modifiedSet on to generateFeatures
[DEBUG] User defined features:[]
[DEBUG] Features detected by binary scanner which are not in server.xml[ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
...
...
...
[DEBUG] Created file C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\src\\main\\liberty\\config\\configDropins\\overrides\\generated-features.xml
[INFO] Generated the following features: [ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
```
Generate features after change to `acme-ee-main\\AcmeCommon\\src\\main\\java\\com\\ibm\\wssvt\\acme\\common\\bean\\StringConfigrable.java`
**FeatureModifiedException did not occur in this instance**
```
[DEBUG] Calling binary-app-scanner-22.0.0.2-SNAPSHOT.jar with the following inputs...
binaryInputs: [C:\\Projects\\acme-ee-main\\AcmeCommon\\target\\classes]
targetJavaEE: ee7
targetMicroP: null
currentFeatures: [ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
logLocation: C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\target
logLevel: *=FINE
locale: en_CA
...
...
...
[DEBUG] Parsing the server file for features and includes: config\\server.xml
[DEBUG] User defined features:null
[DEBUG] Features detected by binary scanner which are not in server.xml[ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
[DEBUG] Xml document we'll try to update after generate features doc=io.openliberty.tools.common.plugins.config.ServerConfigXmlDocument@63e7de46 file=C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\src\\main\\liberty\\config\\server.xml
...
...
...
[DEBUG] Created file C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\src\\main\\liberty\\config\\configDropins\\overrides\\generated-features.xml
[INFO] Generated the following features: [ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
```
Generate features after change to `acme-ee-main\\AcmeAnnuityJAXRSCommon\\src\\main\\java\\com\\ibm\\wssvt\\acme\\annuity\\common\\business\\jaxrs\\MyJAXBResolver.java`
```
[DEBUG] Calling binary-app-scanner-22.0.0.2-SNAPSHOT.jar with the following inputs...
binaryInputs: [C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSCommon\\target\\classes]
targetJavaEE: ee7
targetMicroP: null
currentFeatures: [ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
logLocation: C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\target
logLevel: *=FINE
locale: en_CA
[DEBUG] Recalling binary scanner with the following inputs...
binaryInputs: [C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs11_m2\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs11_m1\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs20\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityPojoWSes\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityWeb\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3JAXWS\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3JAXWS_CDI\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3MDB\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3\\target\\classes]
targetJavaEE: ee7
targetMicroP: null
currentFeatures: []
logLocation: C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\target
logLevel: *=FINE
locale: en_CA
[DEBUG] Parsing the server file for features and includes: config\\server.xml
[DEBUG] FeatureModifiedException, modifiedSet containsAll userFeatures, pass modifiedSet on to generateFeatures
[DEBUG] Parsing the server file for features and includes: config\\server.xml
[DEBUG] User defined features:null
[DEBUG] Features detected by binary scanner which are not in server.xml[ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
[DEBUG] Xml document we'll try to update after generate features doc=io.openliberty.tools.common.plugins.config.ServerConfigXmlDocument@617cafc6 file=C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\src\\main\\liberty\\config\\server.xml
...
...
...
[DEBUG] Created file C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\src\\main\\liberty\\config\\configDropins\\overrides\\generated-features.xml
[INFO] Generated the following features: [ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
```
Generate features after change to `acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs20\\src\\main\\java\\com\\ibm\\wssvt\\acme\\annuity\\business\\impl\\jaxrs\\AnnuityMgmtSvcJAXRS.java`
```
[DEBUG] Calling binary-app-scanner-22.0.0.2-SNAPSHOT.jar with the following inputs...
binaryInputs: [C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs20\\target\\classes]
targetJavaEE: ee7
targetMicroP: null
currentFeatures: [ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
logLocation: C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\target
logLevel: *=FINE
locale: en_CA
[DEBUG] Recalling binary scanner with the following inputs...
binaryInputs: [C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs11_m2\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs11_m1\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSWSes_jaxrs20\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityJAXRSCommon\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityPojoWSes\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityWeb\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3JAXWS\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3JAXWS_CDI\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3MDB\\target\\classes, C:\\Projects\\acme-ee-main\\AcmeAnnuityEJB3\\target\\classes]
targetJavaEE: ee7
targetMicroP: null
currentFeatures: []
logLocation: C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\target
logLevel: *=FINE
locale: en_CA
[DEBUG] Parsing the server file for features and includes: config\\server.xml
[DEBUG] FeatureModifiedException, modifiedSet containsAll userFeatures, pass modifiedSet on to generateFeatures
[DEBUG] Parsing the server file for features and includes: config\\server.xml
[DEBUG] User defined features:null
[DEBUG] Features detected by binary scanner which are not in server.xml[ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
[DEBUG] Xml document we'll try to update after generate features doc=io.openliberty.tools.common.plugins.config.ServerConfigXmlDocument@ba05f301 file=C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\src\\main\\liberty\\config\\server.xml
...
...
...
[DEBUG] Created file C:\\Projects\\acme-ee-main\\AcmeWebEjbEar\\src\\main\\liberty\\config\\configDropins\\overrides\\generated-features.xml
[INFO] Generated the following features: [ejbLite-3.2, servlet-3.1, beanValidation-1.1, jndi-1.0, jaxws-2.2, jms-2.0, samlWeb-2.0, appSecurity-2.0, jaxrs-2.0, socialLogin-1.0, ejbRemote-3.2, jaxb-2.2, openidConnectServer-1.0, mdb-3.2, cdi-1.2, jpa-2.1]
```
Full debug output: [debugOutput.txt](https://github.com/OpenLiberty/ci.maven/files/8584560/debugOutput.txt)
| 3ac9f7471d908f25776044e63821800b6a982cea | 2271d29bb9905cfca8c705a0b49c826d628511fa | https://github.com/openliberty/ci.maven/compare/3ac9f7471d908f25776044e63821800b6a982cea...2271d29bb9905cfca8c705a0b49c826d628511fa | diff --git a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
index dea8e295..52bb2cc3 100644
--- a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
+++ b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
@@ -200,16 +200,20 @@ public class GenerateFeaturesMojo extends ServerFeatureSupport {
Set<String> userFeatures = (optimize) ? existingFeatures :
getServerFeatures(servUtil, generatedFiles, true); // user features excludes generatedFiles
Set<String> modifiedSet = featuresModified.getFeatures(); // a set that works after being modified by the scanner
-
if (modifiedSet.containsAll(userFeatures)) {
// none of the user features were modified, only features which were generated earlier.
- log.debug("FeatureModifiedException, modifiedSet containsAll userFeatures, pass modifiedSet on to generateFeatures");
+ log.debug(
+ "FeatureModifiedException, modifiedSet containsAll userFeatures, pass modifiedSet on to generateFeatures");
+ // features were modified to get a working set with the application's API usage, display warning to users and use modified set
+ log.warn(featuresModified.getMessage());
scannedFeatureList = modifiedSet;
} else {
Set<String> allAppFeatures = featuresModified.getSuggestions(); // suggestions are scanned from binaries
allAppFeatures.addAll(userFeatures); // scanned plus configured features were detected to be in conflict
log.debug("FeatureModifiedException, combine suggestions from scanner with user features in error msg");
- throw new MojoExecutionException(String.format(BinaryScannerUtil.BINARY_SCANNER_CONFLICT_MESSAGE1, allAppFeatures, modifiedSet));
+ throw new MojoExecutionException(
+ String.format(BinaryScannerUtil.BINARY_SCANNER_CONFLICT_MESSAGE1, allAppFeatures, modifiedSet));
+
}
} catch (BinaryScannerUtil.RecommendationSetException showRecommendation) {
if (showRecommendation.isExistingFeaturesConflict()) { | ['liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 564,762 | 111,004 | 14,643 | 123 | 821 | 138 | 10 | 1 | 11,255 | 715 | 4,125 | 122 | 3 | 4 | 1970-01-01T00:27:31 | 118 | Java | {'Java': 1252613, 'Dockerfile': 4974, 'HTML': 1606, 'Shell': 473, 'Batchfile': 12} | Apache License 2.0 |
936 | openliberty/ci.maven/588/584 | openliberty | ci.maven | https://github.com/OpenLiberty/ci.maven/issues/584 | https://github.com/OpenLiberty/ci.maven/pull/588 | https://github.com/OpenLiberty/ci.maven/pull/588 | 1 | fixes | Issuing using `serverXmlFile` config parm in dev mode; parms with 'alias' don't propagate via mojo executor | Using a config parm like:
`<configFile>src/main/liberty/config/server.xml</configFile>`
leads to this failure in dev mode:
> [ERROR] Failed to execute goal io.openliberty.tools:liberty-maven-plugin:3.0.1:dev (default-cli) on project demo: Unable to execute mojo: Unable to parse configuration of mojo io.openliberty.tools:liberty-maven-plugin:3.0.1:create for parameter configFile: Cannot find 'configFile' in class io.openliberty.tools.maven.server.CreateServerMojo -> [Help 1]
The problem seems to be either a limitation (or bug?) with mojo-executor for parameters defined with an alias. The alias value cannot be used in the way it's being used, it seems.
So the parameter with alias is defined here:
public class StartDebugMojoSupport extends BasicSupport {
@Parameter(alias="configFile", property = "serverXmlFile")
protected File serverXmlFile;
and DevMojo is using the alias like this:
elements.add(element(name("configFile"), serverXmlFile.getCanonicalPath()));
A simple change to use the regular (non-alias property name) seems to fix this:
elements.add(element(name("configFile"), serverXmlFile.getCanonicalPath()));
--------
It suggests there could be other issues to at least watch out for in the code, if not to fix now.
E.g. there is at least one other alias:
@Parameter(property = "deployPackages", defaultValue = "project", alias = "installAppPackages")
private String deployPackages;
| 4af2bd99885e79aed550a0a97a65985df7e76a17 | f60e03e9989a348a9667b1c8c0bc92516262c0f3 | https://github.com/openliberty/ci.maven/compare/4af2bd99885e79aed550a0a97a65985df7e76a17...f60e03e9989a348a9667b1c8c0bc92516262c0f3 | diff --git a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java
index d1ec8439..c12637f8 100644
--- a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java
+++ b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java
@@ -791,11 +791,11 @@ public class DevMojo extends StartDebugMojoSupport {
elements.add(element(name("stripVersion"), "true"));
elements.add(element(name("deployPackages"), "project"));
if (serverXmlFile != null) {
- elements.add(element(name("configFile"), serverXmlFile.getCanonicalPath()));
+ elements.add(element(name("serverXmlFile"), serverXmlFile.getCanonicalPath()));
}
} else if (goal.equals("create")) {
if (serverXmlFile != null) {
- elements.add(element(name("configFile"), serverXmlFile.getCanonicalPath()));
+ elements.add(element(name("serverXmlFile"), serverXmlFile.getCanonicalPath()));
}
if (assemblyArtifact != null) {
Element[] featureElems = new Element[4]; | ['liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 285,287 | 55,561 | 7,857 | 62 | 413 | 66 | 4 | 1 | 1,474 | 168 | 335 | 32 | 0 | 0 | 1970-01-01T00:26:08 | 118 | Java | {'Java': 1252613, 'Dockerfile': 4974, 'HTML': 1606, 'Shell': 473, 'Batchfile': 12} | Apache License 2.0 |
937 | openliberty/ci.maven/576/575 | openliberty | ci.maven | https://github.com/OpenLiberty/ci.maven/issues/575 | https://github.com/OpenLiberty/ci.maven/pull/576 | https://github.com/OpenLiberty/ci.maven/pull/576 | 1 | fixes | Dev mode misses configuration for deploy | Dev mode includes configuration for `install-apps`, but since that is now called `deploy`, dev mode needs to set that configuration properly. | 40acae091a517cb2777a85be57c195ad73a48e54 | ea58e53a4cd7d9c3aa1002ef0cd06ae491d309d0 | https://github.com/openliberty/ci.maven/compare/40acae091a517cb2777a85be57c195ad73a48e54...ea58e53a4cd7d9c3aa1002ef0cd06ae491d309d0 | diff --git a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java
index d011934c..f8e5ef3b 100644
--- a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java
+++ b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java
@@ -780,7 +780,7 @@ public class DevMojo extends StartDebugMojoSupport {
featureElems[i] = element(name("feature"), dependencies.get(i));
}
elements.add(element(name("features"), featureElems));
- } else if (goal.equals("install-apps")) {
+ } else if (goal.equals("deploy")) {
String appsDirectory = MavenProjectUtil.getPluginExecutionConfiguration(project,
LIBERTY_MAVEN_PLUGIN_GROUP_ID, LIBERTY_MAVEN_PLUGIN_ARTIFACT_ID, "deploy", "appsDirectory");
if (appsDirectory != null) { | ['liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/DevMojo.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 285,364 | 55,577 | 7,858 | 62 | 111 | 24 | 2 | 1 | 141 | 21 | 28 | 1 | 0 | 0 | 1970-01-01T00:26:08 | 118 | Java | {'Java': 1252613, 'Dockerfile': 4974, 'HTML': 1606, 'Shell': 473, 'Batchfile': 12} | Apache License 2.0 |
933 | openliberty/ci.maven/913/909 | openliberty | ci.maven | https://github.com/OpenLiberty/ci.maven/issues/909 | https://github.com/OpenLiberty/ci.maven/pull/913 | https://github.com/OpenLiberty/ci.maven/pull/913 | 1 | fixes | compile-jsp does not honor skip parameter | The compile-jsp goal does not honor the skip parameter. I think it probably should. | e01e137637b8d6c4b349a715329b96b14b0fb0be | 15e27c85f53f565462e4a470667d19c2d449e1a0 | https://github.com/openliberty/ci.maven/compare/e01e137637b8d6c4b349a715329b96b14b0fb0be...15e27c85f53f565462e4a470667d19c2d449e1a0 | diff --git a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/jsp/CompileJspMojo.java b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/jsp/CompileJspMojo.java
index 241a6fd8..62899ffa 100644
--- a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/jsp/CompileJspMojo.java
+++ b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/jsp/CompileJspMojo.java
@@ -57,6 +57,11 @@ public class CompileJspMojo extends InstallFeatureSupport {
@Override
protected void doExecute() throws Exception {
+ if (skip) {
+ getLog().info("\\nSkipping compile-jsp goal.\\n");
+ return;
+ }
+
CompileJSPs compile = (CompileJSPs) ant.createTask("antlib:io/openliberty/tools/ant:compileJSPs");
if (compile == null) {
throw new IllegalStateException( | ['liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/jsp/CompileJspMojo.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 361,050 | 70,612 | 9,674 | 72 | 116 | 26 | 5 | 1 | 83 | 14 | 18 | 1 | 0 | 0 | 1970-01-01T00:26:37 | 118 | Java | {'Java': 1252613, 'Dockerfile': 4974, 'HTML': 1606, 'Shell': 473, 'Batchfile': 12} | Apache License 2.0 |
932 | openliberty/ci.maven/1308/1301 | openliberty | ci.maven | https://github.com/OpenLiberty/ci.maven/issues/1301 | https://github.com/OpenLiberty/ci.maven/pull/1308 | https://github.com/OpenLiberty/ci.maven/pull/1308 | 1 | fixes | In some cases blank lines are added to server.xml | The generateFeatures mojo adds a comment to server.xml indicating that a new file has been created to handle Liberty features generated by the mojo. In some cases adding that comment incorrectly adds multiple extra blank lines.
We need to find out what triggers this behaviour and either fix it or stop adding the comment until we do. | 990f2db1e14b3d69c1bd98791f6de931deb27d3e | 1a4a49dc33110088753911d5b9ce0102623d0eba | https://github.com/openliberty/ci.maven/compare/990f2db1e14b3d69c1bd98791f6de931deb27d3e...1a4a49dc33110088753911d5b9ce0102623d0eba | diff --git a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
index 06428ada..ac686422 100644
--- a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
+++ b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
@@ -39,6 +39,7 @@ import org.apache.maven.project.MavenProject;
import org.xml.sax.SAXException;
import io.openliberty.tools.common.plugins.config.ServerConfigXmlDocument;
+import io.openliberty.tools.common.plugins.config.XmlDocument;
import io.openliberty.tools.common.plugins.util.InstallFeatureUtil;
import io.openliberty.tools.common.plugins.util.PluginExecutionException;
import io.openliberty.tools.maven.BasicSupport;
@@ -273,9 +274,9 @@ public class GenerateFeaturesMojo extends InstallFeatureSupport {
return;
}
try {
- if (doc.findFMComment(FEATURES_FILE_MESSAGE) == null) {
- doc.createFMComment(FEATURES_FILE_MESSAGE);
- doc.writeXMLDocument(serverXml);
+ if (doc.createFMComment(FEATURES_FILE_MESSAGE)) {
+ doc.writeXMLDocument(serverXml);
+ XmlDocument.addNewlineBeforeFirstElement(serverXml);
}
} catch (IOException | TransformerException e) {
log.debug("Exception adding comment to server.xml", e); | ['liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 507,955 | 99,977 | 13,179 | 102 | 430 | 82 | 7 | 1 | 338 | 57 | 63 | 3 | 0 | 0 | 1970-01-01T00:27:17 | 118 | Java | {'Java': 1252613, 'Dockerfile': 4974, 'HTML': 1606, 'Shell': 473, 'Batchfile': 12} | Apache License 2.0 |
930 | openliberty/ci.maven/1378/1355 | openliberty | ci.maven | https://github.com/OpenLiberty/ci.maven/issues/1355 | https://github.com/OpenLiberty/ci.maven/pull/1378 | https://github.com/OpenLiberty/ci.maven/pull/1378 | 1 | fixes | Generate features, standalone goal with conflict throws RequiredFeatureModifiedException | Requires some investigation.
Running the generate-features goal standalone with a conflict, ie. on the demo-devmode project (https://github.com/OpenLiberty/demo-devmode/tree/main) with setting only feature specified in server.xml: `webProfile-7.0` the following exception is thrown:
com.ibm.ws.report.exceptions.RequiredFeatureModifiedException
Running the dev goal, once in the inner loop and adding the `webProfile-7.0` feature to the server.xml, the following conflict exception is thrown: https://github.com/OpenLiberty/ci.common/blob/main/src/main/java/io/openliberty/tools/common/plugins/util/BinaryScannerUtil.java#L32
Would have expected that both cases would throw the same error.
Also observed with Liberty Gradle Plugin | a595c9a9bc90e4f6a7823725ec73223f6dcac3b8 | d987dd900958a2990b9f7a7090f5f1b5e64cc721 | https://github.com/openliberty/ci.maven/compare/a595c9a9bc90e4f6a7823725ec73223f6dcac3b8...d987dd900958a2990b9f7a7090f5f1b5e64cc721 | diff --git a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
index cce72745..a6eb3ddc 100644
--- a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
+++ b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
@@ -17,7 +17,6 @@ package io.openliberty.tools.maven.server;
import java.io.File;
import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -158,19 +157,11 @@ public class GenerateFeaturesMojo extends ServerFeatureSupport {
// get existing server features from source directory
ServerFeatureUtil servUtil = getServerFeatureUtil();
- servUtil.setLowerCaseFeatures(false);
Set<String> generatedFiles = new HashSet<String>();
generatedFiles.add(GENERATED_FEATURES_FILE_NAME);
- // if optimizing, ignore generated files when passing in existing features to
- // binary scanner
- Set<String> existingFeatures = servUtil.getServerFeatures(configDirectory, serverXmlFile,
- new HashMap<String, File>(), optimize ? generatedFiles : null);
- if (existingFeatures == null) {
- existingFeatures = new HashSet<String>();
- }
- servUtil.setLowerCaseFeatures(true);
+ Set<String> existingFeatures = getServerFeatures(servUtil, generatedFiles, optimize);
Set<String> scannedFeatureList = null;
try {
@@ -180,6 +171,21 @@ public class GenerateFeaturesMojo extends ServerFeatureSupport {
scannedFeatureList = binaryScannerHandler.runBinaryScanner(existingFeatures, classFiles, directories, eeVersion, mpVersion, optimize);
} catch (BinaryScannerUtil.NoRecommendationException noRecommendation) {
throw new MojoExecutionException(String.format(BinaryScannerUtil.BINARY_SCANNER_CONFLICT_MESSAGE3, noRecommendation.getConflicts()));
+ } catch (BinaryScannerUtil.FeatureModifiedException featuresModified) {
+ Set<String> userFeatures = (optimize) ? existingFeatures :
+ getServerFeatures(servUtil, generatedFiles, true); // user features excludes generatedFiles
+ Set<String> modifiedSet = featuresModified.getFeatures(); // a set that works after being modified by the scanner
+
+ if (modifiedSet.containsAll(userFeatures)) {
+ // none of the user features were modified, only features which were generated earlier.
+ log.debug("FeatureModifiedException, modifiedSet containsAll userFeatures, pass modifiedSet on to generateFeatures");
+ scannedFeatureList = modifiedSet;
+ } else {
+ Set<String> allAppFeatures = featuresModified.getSuggestions(); // suggestions are scanned from binaries
+ allAppFeatures.addAll(userFeatures); // scanned plus configured features were detected to be in conflict
+ log.debug("FeatureModifiedException, combine suggestions from scanner with user features in error msg");
+ throw new MojoExecutionException(String.format(BinaryScannerUtil.BINARY_SCANNER_CONFLICT_MESSAGE1, allAppFeatures, modifiedSet));
+ }
} catch (BinaryScannerUtil.RecommendationSetException showRecommendation) {
if (showRecommendation.isExistingFeaturesConflict()) {
throw new MojoExecutionException(String.format(BinaryScannerUtil.BINARY_SCANNER_CONFLICT_MESSAGE2, showRecommendation.getConflicts(), showRecommendation.getSuggestions()));
@@ -259,6 +265,20 @@ public class GenerateFeaturesMojo extends ServerFeatureSupport {
}
}
+ // Get the features from the server config and optionally exclude the specified config files from the search.
+ private Set<String> getServerFeatures(ServerFeatureUtil servUtil, Set<String> generatedFiles, boolean excludeGenerated) {
+ servUtil.setLowerCaseFeatures(false);
+ // if optimizing, ignore generated files when passing in existing features to
+ // binary scanner
+ Set<String> existingFeatures = servUtil.getServerFeatures(configDirectory, serverXmlFile,
+ new HashMap<String, File>(), excludeGenerated ? generatedFiles : null); // pass generatedFiles to exclude them
+ if (existingFeatures == null) {
+ existingFeatures = new HashSet<String>();
+ }
+ servUtil.setLowerCaseFeatures(true);
+ return existingFeatures;
+ }
+
/**
* Gets the binary scanner jar file from the local cache.
* Downloads it first from connected repositories such as Maven Central if a newer release is available than the cached version. | ['liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 535,740 | 105,824 | 13,937 | 109 | 2,757 | 483 | 40 | 1 | 744 | 72 | 156 | 10 | 2 | 0 | 1970-01-01T00:27:22 | 118 | Java | {'Java': 1252613, 'Dockerfile': 4974, 'HTML': 1606, 'Shell': 473, 'Batchfile': 12} | Apache License 2.0 |
931 | openliberty/ci.maven/1351/1350 | openliberty | ci.maven | https://github.com/OpenLiberty/ci.maven/issues/1350 | https://github.com/OpenLiberty/ci.maven/pull/1351 | https://github.com/OpenLiberty/ci.maven/pull/1351 | 1 | fixes | Check correct dependencies for Java EE version | We currently check 'io.openliberty.features' but we should check 'javax.javaee-api'
- [x] Maven
- [x] Gradle | c64cdf1dfa268bb96e98c4fc2af305789f02f34c | 44058dafe800c9b65c2f99c1def749a7774a4762 | https://github.com/openliberty/ci.maven/compare/c64cdf1dfa268bb96e98c4fc2af305789f02f34c...44058dafe800c9b65c2f99c1def749a7774a4762 | diff --git a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
index a0997a82..0cac1f55 100644
--- a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
+++ b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java
@@ -356,22 +356,17 @@ public class GenerateFeaturesMojo extends ServerFeatureSupport {
continue;
}
log.debug("getEEVersion, dep="+d.getGroupId()+":"+d.getArtifactId()+":"+d.getVersion());
- if (d.getGroupId().equals("io.openliberty.features")) {
- String id = d.getArtifactId();
- if (id.equals("javaee-7.0")) {
- return "ee7";
- } else if (id.equals("javaee-8.0")) {
+ if (d.getGroupId().equals("javax") && d.getArtifactId().equals("javaee-api")) {
+ if (d.getVersion().startsWith("8.")) {
return "ee8";
- } else if (id.equals("javaeeClient-7.0")) {
+ } else if (d.getVersion().startsWith("7.")) {
return "ee7";
- } else if (id.equals("javaeeClient-8.0")) {
- return "ee8";
- } else if (id.equals("jakartaee-8.0")) {
- return "ee8";
+ } else if (d.getVersion().startsWith("6.")) {
+ return "ee6";
}
} else if (d.getGroupId().equals("jakarta.platform") &&
d.getArtifactId().equals("jakarta.jakartaee-api") &&
- d.getVersion().equals("8.0.0")) {
+ d.getVersion().startsWith("8.")) {
return "ee8";
}
} | ['liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/server/GenerateFeaturesMojo.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 533,213 | 105,373 | 13,874 | 109 | 925 | 217 | 17 | 1 | 112 | 15 | 32 | 4 | 0 | 0 | 1970-01-01T00:27:19 | 118 | Java | {'Java': 1252613, 'Dockerfile': 4974, 'HTML': 1606, 'Shell': 473, 'Batchfile': 12} | Apache License 2.0 |
8,962 | apache/incubator-wayang/226/225 | apache | incubator-wayang | https://github.com/apache/incubator-wayang/issues/225 | https://github.com/apache/incubator-wayang/pull/226 | https://github.com/apache/incubator-wayang/pull/226 | 1 | close | Operator Distinct One value | The distinct operator just shows one value independent of the value that could have | 5840b1de654804db5d0ad9ff6333c19ec2e64492 | 54046d6561cb24408ee393a4b402fd8970b5d2bb | https://github.com/apache/incubator-wayang/compare/5840b1de654804db5d0ad9ff6333c19ec2e64492...54046d6561cb24408ee393a4b402fd8970b5d2bb | diff --git a/wayang-platforms/wayang-flink/code/main/java/org/apache/wayang/flink/compiler/KeySelectorDistinct.java b/wayang-platforms/wayang-flink/code/main/java/org/apache/wayang/flink/compiler/KeySelectorDistinct.java
index 732c7453..38d33f8e 100644
--- a/wayang-platforms/wayang-flink/code/main/java/org/apache/wayang/flink/compiler/KeySelectorDistinct.java
+++ b/wayang-platforms/wayang-flink/code/main/java/org/apache/wayang/flink/compiler/KeySelectorDistinct.java
@@ -18,6 +18,7 @@
package org.apache.wayang.flink.compiler;
+import java.io.IOException;
import org.apache.flink.api.java.functions.KeySelector;
import java.io.ByteArrayOutputStream;
@@ -36,7 +37,7 @@ public class KeySelectorDistinct<T> implements KeySelector<T, String>, Serializa
ObjectOutputStream objStream = new ObjectOutputStream(b);
objStream.writeObject(value);
return Base64.getEncoder().encodeToString(b.toByteArray());
- }finally {
+ }catch (IOException e) {
return "";
}
}
diff --git a/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkDistinctOperatorTest.java b/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkDistinctOperatorTest.java
index 6edb68ea..80f749f7 100644
--- a/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkDistinctOperatorTest.java
+++ b/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkDistinctOperatorTest.java
@@ -54,11 +54,10 @@ public class FlinkDistinctOperatorTest extends FlinkOperatorTestBase{
// Verify the outcome.
final List<Integer> result = ((DataSetChannel.Instance) outputs[0]).<Integer>provideDataSet().collect();
- for(Object e : result){
- System.out.println(e);
- }
+
Assert.assertEquals(4, result.size());
- Assert.assertEquals(Arrays.asList(0, 1, 6, 2), result);
+ result.sort((a, b) -> a - b);
+ Assert.assertEquals(Arrays.asList(0, 1, 2, 6), result);
}
}
diff --git a/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkJoinOperatorTest.java b/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkJoinOperatorTest.java
index d4b0e4be..75313b45 100644
--- a/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkJoinOperatorTest.java
+++ b/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkJoinOperatorTest.java
@@ -25,17 +25,26 @@ import org.apache.wayang.core.types.DataSetType;
import org.apache.wayang.core.types.DataUnitType;
import org.apache.wayang.flink.channels.DataSetChannel;
import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Disabled;
import java.util.Arrays;
import java.util.List;
+
//problematic
/**
* Test suite for {@link FlinkJoinOperator}.
*/
public class FlinkJoinOperatorTest extends FlinkOperatorTestBase{
+
+ //TODO: Validate FlinkJoinOperator implementation
+ // it is required to validate the implementation of FlinkJoinOperator
+ // because trigger an exception in the test and looks like is a problem in the
+ // implementation of the implementation in the operator
+ // labels:flink,bug
@Test
+ @Disabled("until validation of implementation of the FlinkJoinOperator")
public void testExecution() throws Exception {
// Prepare test data.
DataSetChannel.Instance input0 = this.createDataSetChannelInstance(Arrays.asList(
diff --git a/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkReduceByOperatorTest.java b/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkReduceByOperatorTest.java
index cf68d357..63157809 100644
--- a/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkReduceByOperatorTest.java
+++ b/wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkReduceByOperatorTest.java
@@ -26,7 +26,8 @@ import org.apache.wayang.core.types.DataSetType;
import org.apache.wayang.core.types.DataUnitType;
import org.apache.wayang.flink.channels.DataSetChannel;
import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Disabled;
import java.util.Arrays;
import java.util.HashSet;
@@ -38,7 +39,14 @@ import java.util.stream.Collectors;
* Test suite for {@link FlinkReduceByOperator}.
*/
public class FlinkReduceByOperatorTest extends FlinkOperatorTestBase{
+
+ //TODO: Validate FlinkReduceByOperator implementation
+ // it is required to validate the implementation of FlinkReduceByOperator
+ // because trigger an exception in the test and looks like is a problem in the
+ // implementation of the implementation in the operator
+ // labels:flink,bug
@Test
+ @Disabled("until validation of implementation of the FlinkReduceByOperator")
public void testExecution() throws Exception {
// Prepare test data.
List<Tuple2<String, Integer>> inputList = Arrays.stream("aaabbccccdeefff".split("")) | ['wayang-platforms/wayang-flink/code/main/java/org/apache/wayang/flink/compiler/KeySelectorDistinct.java', 'wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkJoinOperatorTest.java', 'wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkDistinctOperatorTest.java', 'wayang-platforms/wayang-flink/code/test/java/org/apache/wayang/flink/operators/FlinkReduceByOperatorTest.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 3,312,628 | 665,551 | 83,674 | 644 | 82 | 17 | 3 | 1 | 83 | 14 | 14 | 1 | 0 | 0 | 1970-01-01T00:27:31 | 115 | Java | {'Java': 4320410, 'Scala': 316955, 'Python': 242747, 'Vue': 46457, 'Shell': 32312, 'HTML': 17437, 'JavaScript': 15009, 'CSS': 13475, 'Groovy': 7176, 'Dockerfile': 2399, 'ANTLR': 1965, 'SCSS': 1094, 'Ruby': 1089} | Apache License 2.0 |
906 | mzmine/mzmine3/625/575 | mzmine | mzmine3 | https://github.com/mzmine/mzmine3/issues/575 | https://github.com/mzmine/mzmine3/pull/625 | https://github.com/mzmine/mzmine3/pull/625 | 1 | fixes | [Bug] Ion Identity: Cannot import custom adducts/modifications list from csv | ### Basic information
* My operating system: MacOS 12.3
* My MZmine version: 3.0.0-beta
### What happened
* Please describe what happened and how it differs from what you expected to happen.
I tried importing a custom adducts.csv file but it does not load desired adducts.
* Please include all parameter values for the MZmine modules you used.
No parameters used, just did: `Feature list methods > Feature grouping > Add more ion identities`
You can try exporting the default list and re-importing while adding a single dummy line it does not work
* Please upload screenshots of any errors and your log/mzmine.log file.
```
2022-03-07 18:24:51 WARNING io.github.mzmine.parameters.parametertypes.MultiChoiceComponent loadChoices Line length is different to header length: 5 to 5
```
* Please upload an MZmine project file that demonstrates the problem you are reporting.
NA | de589e62b0e2c57cd2e6d84f833f99b2f0d913fc | 6cc2e4606411d854b05a0bb76008ba866ff43602 | https://github.com/mzmine/mzmine3/compare/de589e62b0e2c57cd2e6d84f833f99b2f0d913fc...6cc2e4606411d854b05a0bb76008ba866ff43602 | diff --git a/src/main/java/io/github/mzmine/parameters/parametertypes/MultiChoiceComponent.java b/src/main/java/io/github/mzmine/parameters/parametertypes/MultiChoiceComponent.java
index d361e3ec4..803a86a59 100644
--- a/src/main/java/io/github/mzmine/parameters/parametertypes/MultiChoiceComponent.java
+++ b/src/main/java/io/github/mzmine/parameters/parametertypes/MultiChoiceComponent.java
@@ -1,19 +1,19 @@
/*
- * Copyright 2006-2018 The MZmine 2 Development Team
- *
- * This file is part of MZmine 2.
- *
- * MZmine 2 is free software; you can redistribute it and/or modify it under the terms of the GNU
+ * Copyright 2006-2021 The MZmine Development Team
+ *
+ * This file is part of MZmine.
+ *
+ * MZmine is free software; you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
- *
- * MZmine 2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
- * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ *
+ * MZmine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+ * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License along with MZmine 2; if not,
- * write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
- * USA
+ *
+ * You should have received a copy of the GNU General Public License along with MZmine; if not,
+ * write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
*/
package io.github.mzmine.parameters.parametertypes;
@@ -25,7 +25,19 @@ import io.github.mzmine.main.MZmineCore;
import io.github.mzmine.parameters.ParameterSet;
import io.github.mzmine.util.ExitCode;
import io.github.mzmine.util.StringMapParser;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.function.Supplier;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
@@ -37,37 +49,23 @@ import javafx.scene.layout.BorderPane;
import javafx.scene.layout.VBox;
import javafx.stage.FileChooser;
import org.controlsfx.control.CheckListView;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.*;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import org.controlsfx.control.IndexedCheckModel;
/**
+ *
*/
public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPane {
- private final Logger logger = Logger.getLogger(getClass().getName());
- // Filename extension.
- private static final FileChooser.ExtensionFilter csvFilter =
- new FileChooser.ExtensionFilter("Comma-separated values files", "*.csv");
- private final CheckListView<T> adductsView =
- new CheckListView<>(FXCollections.observableArrayList());
+ // Filename extension.
+ private static final FileChooser.ExtensionFilter csvFilter = new FileChooser.ExtensionFilter(
+ "Comma-separated values files", "*.csv");
+ private final Logger logger = Logger.getLogger(getClass().getName());
+ private final CheckListView<T> adductsView = new CheckListView<>(
+ FXCollections.observableArrayList());
private final Label lbTitle;
- private final Button addButton = new Button("Add...");
- private final Button importButton = new Button("Import...");
- private final Button exportButton = new Button("Export...");
- private final Button defaultButton = new Button("Reset");
private final VBox buttonBar = new VBox();
-
- private final List<T> defaultChoices;
-
- private StringMapParser<T> parser;
-
+ private final StringMapParser<T> parser;
/**
* Create the component.
@@ -75,45 +73,79 @@ public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPa
* @param choices the adduct choices.
*/
public MultiChoiceComponent(List<T> choices, List<T> defaultChoices, Supplier<T> addChoiceParam,
- StringMapParser<T> parser) {
- this(choices, defaultChoices, addChoiceParam, parser, true, true, true, true);
+ StringMapParser<T> parser) {
+ this(choices, defaultChoices, addChoiceParam, parser, true, true, true, true, true, true);
}
+
/**
* Create the component.
*
- * @param choices the adduct choices.
+ * @param choices the adduct choices.
* @param addChoiceParam usually a ParameterSet as ObjectGenerator to add new choices
*/
- public MultiChoiceComponent(List<T> choices, List<T> defaultChoices, Supplier<T> addChoiceParam, StringMapParser<T> parser,
- boolean btnAdd, boolean btnImport, boolean btnExport, boolean btnDefault) {
- this.defaultChoices = defaultChoices;
+ public MultiChoiceComponent(List<T> choices, List<T> defaultChoices, Supplier<T> addChoiceParam,
+ StringMapParser<T> parser, boolean btnToggleSelect, boolean btnClear, boolean btnAdd,
+ boolean btnImport, boolean btnExport, boolean btnDefault) {
this.parser = parser;
setChoices(choices);
buttonBar.setSpacing(10.0);
- if(btnAdd)
- buttonBar.getChildren().addAll(addButton);
- if(btnImport)
+ Button importButton = new Button("Import...");
+ if (btnImport) {
buttonBar.getChildren().addAll(importButton);
- if(btnExport)
+ }
+ Button exportButton = new Button("Export...");
+ if (btnExport) {
buttonBar.getChildren().addAll(exportButton);
- if(btnDefault)
+ }
+ Button toggleSelectButton = new Button("(De)select");
+ if (btnToggleSelect) {
+ buttonBar.getChildren().addAll(toggleSelectButton);
+ }
+ Button clearButton = new Button("Clear");
+ if (btnClear) {
+ buttonBar.getChildren().addAll(clearButton);
+ }
+ Button addButton = new Button("Add...");
+ if (btnAdd) {
+ buttonBar.getChildren().addAll(addButton);
+ }
+ Button defaultButton = new Button("Reset");
+ if (btnDefault) {
buttonBar.getChildren().addAll(defaultButton);
+ }
lbTitle = new Label("");
setTop(lbTitle);
setCenter(adductsView);
setRight(buttonBar);
+ clearButton.setTooltip(new Tooltip("Remove all items"));
+ clearButton.setOnAction(e -> adductsView.getItems().clear());
+
+ toggleSelectButton.setTooltip(new Tooltip("Toggle selection"));
+ toggleSelectButton.setOnAction(e -> {
+ final IndexedCheckModel<T> model = adductsView.getCheckModel();
+ final ObservableList<T> items = adductsView.getItems();
+ if (items.size() > 0) {
+ boolean newState = !model.isChecked(items.get(0));
+ model.clearChecks();
+ if (newState) {
+ for (T adduct : items) {
+ model.check(adduct);
+ }
+ }
+ }
+ });
+
addButton.setTooltip(new Tooltip("Add a custom choice to the set of choices"));
addButton.setOnAction(e -> {
- if(addChoiceParam==null) {
+ if (addChoiceParam == null) {
return;
}
// Show dialog.
- if(addChoiceParam instanceof ParameterSet) {
- final ParameterSet parameters = ((ParameterSet)addChoiceParam);
+ if (addChoiceParam instanceof final ParameterSet parameters) {
if (parameters.showSetupDialog(true) == ExitCode.OK) {
// Add to list of choices (if not already present).
T choice = addChoiceParam.get();
@@ -122,15 +154,14 @@ public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPa
currentChoices.add(choice);
}
}
- }
- else {
+ } else {
try {
T choice = addChoiceParam.get();
final Collection<T> currentChoices = adductsView.getItems();
if (!currentChoices.contains(choice)) {
currentChoices.add(choice);
}
- } catch(Exception ex) {
+ } catch (Exception ex) {
logger.warning("Cannot create new choice");
}
}
@@ -145,8 +176,9 @@ public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPa
final File file = fileChooser.showOpenDialog(this.getScene().getWindow());
// Select a file.
- if (file == null)
+ if (file == null) {
return;
+ }
// Read the CSV file into a string array.
String[][] csvLines = null;
@@ -159,8 +191,9 @@ public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPa
}
// Read the adducts data.
- if (csvLines == null)
+ if (csvLines == null) {
return;
+ }
// Load adducts from CSV data into parent choices.
loadChoices(csvLines, adductsView.getItems());
@@ -174,8 +207,9 @@ public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPa
fileChooser.getExtensionFilters().add(csvFilter);
final File file = fileChooser.showSaveDialog(this.getScene().getWindow());
- if (file == null)
+ if (file == null) {
return;
+ }
// Export the adducts.
try {
@@ -199,14 +233,26 @@ public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPa
return new ArrayList<>(adductsView.getItems());
}
+ public void setChoices(List<T> choices) {
+ adductsView.getItems().clear();
+ adductsView.getItems().addAll(choices);
+ }
+
+ @SafeVarargs
+ public final void setChoices(T... choices) {
+ adductsView.getItems().clear();
+ adductsView.getItems().addAll(choices);
+ }
+
public List<T> getValue() {
return new ArrayList<>(adductsView.getCheckModel().getCheckedItems());
}
public void setValue(List<T> newValue) {
- adductsView.getCheckModel().clearChecks();;
- for (T adduct : newValue)
+ adductsView.getCheckModel().clearChecks();
+ for (T adduct : newValue) {
adductsView.getCheckModel().check(adduct);
+ }
}
public String getTitle() {
@@ -230,46 +276,47 @@ public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPa
/**
* Load the adducts into the list of adduct choices.
*
- * @param lines CSV lines to parse.
+ * @param lines CSV lines to parse.
* @param choices the current adduct choices.
* @return a new list of adduct choices that includes the original choices plus any new ones found
- * by parsing the CSV lines.
+ * by parsing the CSV lines.
*/
private void loadChoices(final String[][] lines, final Collection<T> choices) {
- if(lines.length<2)
+ if (lines.length < 2) {
return;
+ }
// load a list of choices.
String[] header = lines[0];
- for (int l=1; l<lines.length; l++) {
+ for (int l = 1; l < lines.length; l++) {
String[] line = lines[l];
- if(lines.length!=header.length) {
- logger.warning("Line length is different to header length: "+line.length+" to "+header.length);
+ if (line.length != header.length) {
+ logger.warning(
+ "Line length is different to header length: " + line.length + " to " + header.length);
continue;
}
// map with values
- Map<String,String> map = new HashMap<>();
- for(int f=0; f<line.length; f++) {
+ Map<String, String> map = new HashMap<>();
+ for (int f = 0; f < line.length; f++) {
map.put(header[f], line[f]);
}
try {
// Create new choice
- if(parser!=null) {
+ if (parser != null) {
final T choice = parser.parseDataMap(map);
if (!choices.contains(choice)) {
choices.add(choice);
}
}
} catch (Exception ignored) {
- logger.warning("Cannot parse new choice: "+line);
+ logger.warning("Cannot parse new choice: " + Arrays.toString(line));
}
}
}
-
/**
* Writes the choices to a CSV file.
*
- * @param file the destination file.
+ * @param file the destination file.
* @param choices the choices to export.
* @throws IOException if there are i/o problems.
*/
@@ -287,13 +334,4 @@ public class MultiChoiceComponent<T extends StringMapParser<T>> extends BorderPa
writer.writeln(map.values().toArray(String[]::new));
}
}
-
- public void setChoices(List<T> choices) {
- adductsView.getItems().clear();
- adductsView.getItems().addAll(choices);
- }
- public void setChoices(T... choices) {
- adductsView.getItems().clear();
- adductsView.getItems().addAll(choices);
- }
}
diff --git a/src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationComponent.java b/src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationComponent.java
index 92f907b48..e09a27c41 100644
--- a/src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationComponent.java
+++ b/src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationComponent.java
@@ -30,7 +30,6 @@ import javafx.scene.layout.HBox;
/**
* A component for selecting adducts.
- *
*/
public class IonModificationComponent extends HBox {
@@ -42,26 +41,29 @@ public class IonModificationComponent extends HBox {
*
* @param choicesAdducts the adduct choices.
*/
- public IonModificationComponent(List<IonModification> choicesAdducts, List<IonModification> choicesMods) {
+ public IonModificationComponent(List<IonModification> choicesAdducts,
+ List<IonModification> choicesMods) {
setFillHeight(true);
setSpacing(5);
- adducts = new MultiChoiceComponent<>(choicesAdducts, List.of(IonModification.getDefaultValuesPos()), null,
- IonModification.H, // just any object as a parser
- false, true, true, false
- );
+ adducts = new MultiChoiceComponent<>(choicesAdducts,
+ List.of(IonModification.getDefaultValuesPos()), null, IonModification.H,
+ // just any object as a parser
+ true, true, false, true, true, false);
// add top label
adducts.setTitle("Adducts");
// add buttons
adducts.addButton("Add", new AddIonModificationAction(adducts));
adducts.addButton("Combine", new CombineESIAdductsAction(adducts));
- adducts.addButton("Reset positive", (e) -> adducts.setChoices(IonModification.getDefaultValuesPos()));
- adducts.addButton("Reset negative", (e) -> adducts.setChoices(IonModification.getDefaultValuesNeg()));
+ adducts.addButton("Reset positive",
+ (e) -> adducts.setChoices(IonModification.getDefaultValuesPos()));
+ adducts.addButton("Reset negative",
+ (e) -> adducts.setChoices(IonModification.getDefaultValuesNeg()));
- mods = new MultiChoiceComponent<IonModification>(choicesMods, List.of(IonModification.getDefaultModifications()), null,
- IonModification.H, // just any object as a parser
- false, true, true, false
- );
+ mods = new MultiChoiceComponent<>(choicesMods,
+ List.of(IonModification.getDefaultModifications()), null, IonModification.H,
+ // just any object as a parser
+ true, true, false, true, true, false);
// add top label
mods.setTitle("Modifications");
// add buttons
@@ -75,8 +77,7 @@ public class IonModificationComponent extends HBox {
public IonModification[][] getChoices() {
IonModification[] ad = adducts.getChoices().toArray(IonModification[]::new);
IonModification[] md = mods.getChoices().toArray(IonModification[]::new);
- IonModification[][] all = {ad, md};
- return all;
+ return new IonModification[][]{ad, md};
}
/**
@@ -89,8 +90,7 @@ public class IonModificationComponent extends HBox {
.toArray(IonModification[]::new);
IonModification[] md = mods.getValue().stream().filter(Objects::nonNull)
.toArray(IonModification[]::new);
- IonModification[][] all = {ad, md};
- return all;
+ return new IonModification[][]{ad, md};
}
public void setValue(final IonModification[][] values) {
diff --git a/src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationParameter.java b/src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationParameter.java
index 97a87a5bd..41cf5a456 100644
--- a/src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationParameter.java
+++ b/src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationParameter.java
@@ -16,30 +16,6 @@
*
*/
-/*
- * Copyright 2006-2015 The MZmine 2 Development Team
- *
- * This file is part of MZmine 2.
- *
- * MZmine 2 is free software; you can redistribute it and/or modify it under the terms of the GNU
- * General Public License as published by the Free Software Foundation; either version 2 of the
- * License, or (at your option) any later version.
- *
- * MZmine 2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
- * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License along with MZmine 2; if not,
- * write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
- * USA
- */
-
-/*
- * Code created was by or on behalf of Syngenta and is released under the open source license in use
- * for the pre-existing code or project. Syngenta does not assert ownership or copyright any over
- * pre-existing work.
- */
-
package io.github.mzmine.parameters.parametertypes.ionidentity;
import io.github.mzmine.datamodel.identities.iontype.CombinedIonModification;
@@ -47,13 +23,16 @@ import io.github.mzmine.datamodel.identities.iontype.IonModification;
import io.github.mzmine.datamodel.identities.iontype.IonModificationType;
import io.github.mzmine.parameters.UserParameter;
import io.github.mzmine.parameters.parametertypes.MultiChoiceParameter;
-import org.w3c.dom.*;
-
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.logging.Logger;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NamedNodeMap;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
/**
* Adducts parameter.
@@ -65,7 +44,7 @@ public class IonModificationParameter
implements UserParameter<IonModification[][], IonModificationComponent> {
// Logger.
- private static final Logger LOG = Logger.getLogger(IonModificationParameter.class.getName());
+ private static final Logger logger = Logger.getLogger(IonModificationParameter.class.getName());
// XML tags.
private static final String MODIFICTAION_TAG = "modification_type";
@@ -165,7 +144,8 @@ public class IonModificationParameter
adducts.add(add);
} catch (NumberFormatException ex) {
// Ignore.
- LOG.warning("Illegal mass difference attribute in " + childAdduct.getNodeValue());
+ logger.warning(
+ "Illegal mass difference attribute in " + childAdduct.getNodeValue());
}
}
} | ['src/main/java/io/github/mzmine/parameters/parametertypes/MultiChoiceComponent.java', 'src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationComponent.java', 'src/main/java/io/github/mzmine/parameters/parametertypes/ionidentity/IonModificationParameter.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 10,984,385 | 2,560,723 | 312,770 | 2,043 | 10,993 | 2,489 | 262 | 3 | 909 | 136 | 209 | 26 | 0 | 1 | 1970-01-01T00:27:28 | 113 | Java | {'Java': 13983394, 'HTML': 212691, 'CSS': 76094, 'C++': 42644, 'C': 445} | MIT License |
8,566 | pgpainless/pgpainless/144/143 | pgpainless | pgpainless | https://github.com/pgpainless/pgpainless/issues/143 | https://github.com/pgpainless/pgpainless/pull/144 | https://github.com/pgpainless/pgpainless/pull/144 | 1 | fixes | addSubkey() ignores key flags | PGPainless ignores the user set key flags when adding a subkey like follows:
```
secretKey = PGPainless.modifyKeyRing(secretKey)
.addSubKey(
KeySpec.getBuilder(KeyType.ECDH(EllipticCurve._BRAINPOOLP512R1))
.withKeyFlags(KeyFlag.ENCRYPT_COMMS, KeyFlag.ENCRYPT_STORAGE)
.withDefaultAlgorithms(),
subkeyPassphrase,
protector)
.done();
```
Instead of `ENCRYPT_COMMS, ENCRYPT_STORAGE`, the subkey is carrying the flag `CERTIFY_OTHER`, as it erroneously inherits the key flags from the primary key.
This is a problem with the API. | 8fffa3079a87e88621f138f728e979ca1b235415 | 548bfff93f6794ce36891b2466f36922a0e59086 | https://github.com/pgpainless/pgpainless/compare/8fffa3079a87e88621f138f728e979ca1b235415...548bfff93f6794ce36891b2466f36922a0e59086 | diff --git a/pgpainless-core/src/main/java/org/pgpainless/key/generation/KeySpec.java b/pgpainless-core/src/main/java/org/pgpainless/key/generation/KeySpec.java
index 60639849..4ecce7be 100644
--- a/pgpainless-core/src/main/java/org/pgpainless/key/generation/KeySpec.java
+++ b/pgpainless-core/src/main/java/org/pgpainless/key/generation/KeySpec.java
@@ -42,7 +42,7 @@ public class KeySpec {
}
@Nullable
- PGPSignatureSubpacketVector getSubpackets() {
+ public PGPSignatureSubpacketVector getSubpackets() {
return subpacketGenerator != null ? subpacketGenerator.generate() : null;
}
diff --git a/pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditor.java b/pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditor.java
index ee8fca9f..7a86c875 100644
--- a/pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditor.java
+++ b/pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditor.java
@@ -167,12 +167,16 @@ public class SecretKeyRingEditor implements SecretKeyRingEditorInterface {
PGPSecretKey secretSubKey = generateSubKey(keySpec, subKeyPassphrase);
SecretKeyRingProtector subKeyProtector = PasswordBasedSecretKeyRingProtector
.forKey(secretSubKey, subKeyPassphrase);
+ PGPSignatureSubpacketVector hashedSubpackets = keySpec.getSubpackets();
+ PGPSignatureSubpacketVector unhashedSubpackets = null;
- return addSubKey(secretSubKey, subKeyProtector, secretKeyRingProtector);
+ return addSubKey(secretSubKey, hashedSubpackets, unhashedSubpackets, subKeyProtector, secretKeyRingProtector);
}
@Override
public SecretKeyRingEditorInterface addSubKey(PGPSecretKey secretSubKey,
+ PGPSignatureSubpacketVector hashedSubpackets,
+ PGPSignatureSubpacketVector unhashedSubpackets,
SecretKeyRingProtector subKeyProtector,
SecretKeyRingProtector keyRingProtector)
throws PGPException {
@@ -196,7 +200,7 @@ public class SecretKeyRingEditor implements SecretKeyRingEditorInterface {
PGPKeyRingGenerator keyRingGenerator = new PGPKeyRingGenerator(
secretKeyRing, ringDecryptor, digestCalculator, contentSignerBuilder, subKeyEncryptor);
- keyRingGenerator.addSubKey(subKeyPair);
+ keyRingGenerator.addSubKey(subKeyPair, hashedSubpackets, unhashedSubpackets);
secretKeyRing = keyRingGenerator.generateSecretKeyRing();
return this;
diff --git a/pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditorInterface.java b/pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditorInterface.java
index b5ce6723..8f936636 100644
--- a/pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditorInterface.java
+++ b/pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditorInterface.java
@@ -25,6 +25,7 @@ import org.bouncycastle.openpgp.PGPException;
import org.bouncycastle.openpgp.PGPSecretKey;
import org.bouncycastle.openpgp.PGPSecretKeyRing;
import org.bouncycastle.openpgp.PGPSignature;
+import org.bouncycastle.openpgp.PGPSignatureSubpacketVector;
import org.pgpainless.key.OpenPgpV4Fingerprint;
import org.pgpainless.key.generation.KeySpec;
import org.pgpainless.key.protection.KeyRingProtectionSettings;
@@ -99,7 +100,10 @@ public interface SecretKeyRingEditorInterface {
SecretKeyRingProtector secretKeyRingProtector)
throws InvalidAlgorithmParameterException, NoSuchAlgorithmException, PGPException;
- SecretKeyRingEditorInterface addSubKey(PGPSecretKey subKey, SecretKeyRingProtector subKeyProtector, SecretKeyRingProtector keyRingProtector)
+ SecretKeyRingEditorInterface addSubKey(PGPSecretKey subKey,
+ PGPSignatureSubpacketVector hashedSubpackets,
+ PGPSignatureSubpacketVector unhashedSubpackets,
+ SecretKeyRingProtector subKeyProtector, SecretKeyRingProtector keyRingProtector)
throws PGPException;
/**
diff --git a/pgpainless-core/src/test/java/org/pgpainless/key/modification/AddSubKeyTest.java b/pgpainless-core/src/test/java/org/pgpainless/key/modification/AddSubKeyTest.java
index 814f0d65..bb2dca10 100644
--- a/pgpainless-core/src/test/java/org/pgpainless/key/modification/AddSubKeyTest.java
+++ b/pgpainless-core/src/test/java/org/pgpainless/key/modification/AddSubKeyTest.java
@@ -15,12 +15,14 @@
*/
package org.pgpainless.key.modification;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import java.io.IOException;
import java.security.InvalidAlgorithmParameterException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@@ -38,6 +40,7 @@ import org.pgpainless.key.TestKeys;
import org.pgpainless.key.generation.KeySpec;
import org.pgpainless.key.generation.type.ecc.EllipticCurve;
import org.pgpainless.key.generation.type.ecc.ecdsa.ECDSA;
+import org.pgpainless.key.info.KeyRingInfo;
import org.pgpainless.key.protection.PasswordBasedSecretKeyRingProtector;
import org.pgpainless.key.protection.SecretKeyRingProtector;
import org.pgpainless.key.protection.UnlockSecretKey;
@@ -78,5 +81,8 @@ public class AddSubKeyTest {
SecretKeyRingProtector protector = SecretKeyRingProtector.unlockAllKeysWith(
Passphrase.fromPassword("subKeyPassphrase"), secretKeys);
PGPPrivateKey privateKey = UnlockSecretKey.unlockSecretKey(subKey, protector);
+
+ KeyRingInfo info = new KeyRingInfo(secretKeys);
+ assertEquals(Collections.singletonList(KeyFlag.SIGN_DATA), info.getKeyFlagsOf(subKeyId));
}
} | ['pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditor.java', 'pgpainless-core/src/main/java/org/pgpainless/key/modification/secretkeyring/SecretKeyRingEditorInterface.java', 'pgpainless-core/src/test/java/org/pgpainless/key/modification/AddSubKeyTest.java', 'pgpainless-core/src/main/java/org/pgpainless/key/generation/KeySpec.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 744,886 | 157,687 | 19,210 | 180 | 1,365 | 292 | 16 | 3 | 716 | 55 | 147 | 15 | 0 | 1 | 1970-01-01T00:27:04 | 113 | Java | {'Java': 2966256, 'Roff': 28531, 'HTML': 2502, 'Shell': 1768} | Apache License 2.0 |
8,585 | jenkinsci/code-coverage-api-plugin/703/689 | jenkinsci | code-coverage-api-plugin | https://github.com/jenkinsci/code-coverage-api-plugin/issues/689 | https://github.com/jenkinsci/code-coverage-api-plugin/pull/703 | https://github.com/jenkinsci/code-coverage-api-plugin/pull/703 | 1 | fixes | Computing Negative Deltas Doesn't Seem to Work | ### Jenkins and plugins versions report
Code Coverage API Version 4.4.0
### What Operating System are you using (both controller, and any agents involved in the problem)?
Linux Ubuntu Agent and Controller
### Reproduction steps
1. Have a project setup where code coverage is dropping when a pull request is opened.
2. Run `recordCoverage` with the following configuration
```
recordCoverage(
checksAnnotationScope: 'MODIFIED_LINES', checksName: 'Code Coverage',
failOnError: true, name: 'Code Coverage',
qualityGates: [
[baseline: 'PROJECT_DELTA', metric: 'LINE', threshold: -1.0, criticality: 'UNSTABLE'],
[baseline: 'PROJECT_DELTA', metric: 'BRANCH', threshold: -1.0, criticality: 'UNSTABLE']
],
sourceDirectories: [[path: 'src']],
scm: 'code-repo', sourceCodeRetention: 'LAST_BUILD',
tools: [[parser: 'JACOCO', pattern: '**/reports/jacoco/**/jacocoTestReport.xml']]
)
```
### Expected Results
```
13:21:49 [Coverage] Evaluating quality gates
13:21:49 [Coverage] -> Some quality gates have been missed: overall result is UNSTABLE
13:21:49 [Coverage] -> Details for each quality gate:
13:21:49 [Coverage] -> [Overall project (difference to reference job) - Line Coverage]: ≪Success≫ - (Actual value: +0.33%, Quality gate: -1.00)
13:21:49 [Coverage] -> [Overall project (difference to reference job) - Branch Coverage]: ≪Unstable≫ - (Actual value: -1.37%, Quality gate: -1.00)
```
### Actual Results
```
13:21:49 [Coverage] Evaluating quality gates
13:21:49 [Coverage] -> All quality gates have been passed
13:21:49 [Coverage] -> Details for each quality gate:
13:21:49 [Coverage] -> [Overall project (difference to reference job) - Line Coverage]: ≪Success≫ - (Actual value: +0.33%, Quality gate: -1.00)
13:21:49 [Coverage] -> [Overall project (difference to reference job) - Branch Coverage]: ≪Success≫ - (Actual value: -1.37%, Quality gate: -1.00)
```
### Anything else?
To add a bit more information, as I'm not sure if this is a bug or expected behaviour that negative threshold delta values aren't checked properly. I'm wondering if there's something I'm missing, as it seems like the plugin should be able to accommodate something like this.
My use case is to prevent a Delta drop in the quality gate by more than 1%. It seems like providing a `-` threshold value doesn't do what I'd expect it to. I expect that any threshold value that's < -1 would fail; in my case the value is `-1.37`.
The inverse case of using positive threshold values works as expected, but doesn't fit my use case. If I were to use `1` instead of `-1`, both my quality gates would fail as the `Line Coverage` < 1. | 74a32a770930fd54edc7398b8b37c6b1e61bd2bd | 2c5fefa4a661af731c97c8b3f3cc802a16da9226 | https://github.com/jenkinsci/code-coverage-api-plugin/compare/74a32a770930fd54edc7398b8b37c6b1e61bd2bd...2c5fefa4a661af731c97c8b3f3cc802a16da9226 | diff --git a/plugin/src/main/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluator.java b/plugin/src/main/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluator.java
index 146213f9..082ce456 100644
--- a/plugin/src/main/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluator.java
+++ b/plugin/src/main/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluator.java
@@ -3,6 +3,13 @@ package io.jenkins.plugins.coverage.metrics.steps;
import java.util.Collection;
import java.util.Locale;
+import org.apache.commons.lang3.math.Fraction;
+
+import edu.hm.hafner.coverage.FractionValue;
+import edu.hm.hafner.coverage.Metric;
+import edu.hm.hafner.coverage.SafeFraction;
+import edu.hm.hafner.coverage.Value;
+
import io.jenkins.plugins.coverage.metrics.model.CoverageStatistics;
import io.jenkins.plugins.coverage.metrics.model.ElementFormatter;
import io.jenkins.plugins.util.QualityGateEvaluator;
@@ -15,10 +22,13 @@ import io.jenkins.plugins.util.QualityGateStatus;
* @author Johannes Walter
*/
class CoverageQualityGateEvaluator extends QualityGateEvaluator<CoverageQualityGate> {
+ private static final Fraction HUNDRED = Fraction.getFraction("100.0");
+
private static final ElementFormatter FORMATTER = new ElementFormatter();
private final CoverageStatistics statistics;
- CoverageQualityGateEvaluator(final Collection<? extends CoverageQualityGate> qualityGates, final CoverageStatistics statistics) {
+ CoverageQualityGateEvaluator(final Collection<? extends CoverageQualityGate> qualityGates,
+ final CoverageStatistics statistics) {
super(qualityGates);
this.statistics = statistics;
@@ -29,14 +39,41 @@ class CoverageQualityGateEvaluator extends QualityGateEvaluator<CoverageQualityG
var baseline = qualityGate.getBaseline();
var possibleValue = statistics.getValue(baseline, qualityGate.getMetric());
if (possibleValue.isPresent()) {
- var actualValue = possibleValue.get();
+ var actualValue = convertActualValue(possibleValue.get());
var status = actualValue.isOutOfValidRange(
qualityGate.getThreshold()) ? qualityGate.getStatus() : QualityGateStatus.PASSED;
- result.add(qualityGate, status, FORMATTER.format(actualValue, Locale.ENGLISH));
+ result.add(qualityGate, status, FORMATTER.format(possibleValue.get(), Locale.ENGLISH));
}
else {
result.add(qualityGate, QualityGateStatus.INACTIVE, "n/a");
}
}
+
+ /**
+ * Converts the actual value to a percentage if necessary. Delta values are internally stored as fractions, but
+ * users expect percentages when they are displayed or used in thresholds.
+ *
+ * @param value
+ * the actual stored value
+ *
+ * @return the converted value
+ */
+ private Value convertActualValue(final Value value) {
+ var metric = value.getMetric();
+ if (metric.equals(Metric.COMPLEXITY)
+ || metric.equals(Metric.COMPLEXITY_MAXIMUM)
+ || metric.equals(Metric.LOC)) {
+ return value; // ignore integer based metrics
+ }
+ if (value instanceof FractionValue) { // delta percentage
+ return new FractionValue(metric, covertToPercentage((FractionValue) value));
+ }
+
+ return value;
+ }
+
+ private Fraction covertToPercentage(final FractionValue value) {
+ return new SafeFraction(value.getFraction()).multiplyBy(HUNDRED);
+ }
}
diff --git a/plugin/src/test/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluatorTest.java b/plugin/src/test/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluatorTest.java
index 53561661..9d28b1fd 100644
--- a/plugin/src/test/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluatorTest.java
+++ b/plugin/src/test/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluatorTest.java
@@ -5,6 +5,8 @@ import java.util.Collection;
import java.util.List;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
import edu.hm.hafner.coverage.Metric;
@@ -137,6 +139,51 @@ class CoverageQualityGateEvaluatorTest extends AbstractCoverageTest {
"-> [Modified files (difference to overall project) - Line Coverage]: ≪Success≫ - (Actual value: +5.00%, Quality gate: 0.00)");
}
+ @ParameterizedTest(name = "A quality gate of {0} should not be passed if the coverage drops by 10%")
+ @ValueSource(ints = {8, 1, 0, -1, -8})
+ void shouldHandleNegativeValues(final double minimum) {
+ Collection<CoverageQualityGate> qualityGates = new ArrayList<>();
+
+ qualityGates.add(new CoverageQualityGate(minimum, Metric.FILE, Baseline.PROJECT_DELTA, QualityGateCriticality.UNSTABLE));
+
+ CoverageQualityGateEvaluator evaluator = new CoverageQualityGateEvaluator(qualityGates, createStatistics());
+ QualityGateResult result = evaluator.evaluate();
+
+ assertThat(result).hasOverallStatus(QualityGateStatus.WARNING).isNotSuccessful().isNotInactive().hasMessages(
+ String.format(
+ "-> [Overall project (difference to reference job) - File Coverage]: ≪Unstable≫ - (Actual value: -10.00%%, Quality gate: %.2f)", minimum));
+ }
+
+ @ParameterizedTest(name = "A quality gate of {0} should be passed if the coverage is at 50%")
+ @ValueSource(ints = {-10, 0, 10, 50})
+ void shouldPassAllThresholds(final double minimum) {
+ Collection<CoverageQualityGate> qualityGates = new ArrayList<>();
+
+ qualityGates.add(new CoverageQualityGate(minimum, Metric.LINE, Baseline.PROJECT, QualityGateCriticality.UNSTABLE));
+
+ CoverageQualityGateEvaluator evaluator = new CoverageQualityGateEvaluator(qualityGates, createStatistics());
+ QualityGateResult result = evaluator.evaluate();
+
+ assertThat(result).hasOverallStatus(QualityGateStatus.PASSED).isSuccessful().isNotInactive().hasMessages(
+ String.format(
+ "-> [Overall project - Line Coverage]: ≪Success≫ - (Actual value: 50.00%%, Quality gate: %.2f)", minimum));
+ }
+
+ @ParameterizedTest(name = "A quality gate of {0} should not be passed if the coverage is at 50%")
+ @ValueSource(ints = {51, 60, 70, 200})
+ void shouldFailAllThresholds(final double minimum) {
+ Collection<CoverageQualityGate> qualityGates = new ArrayList<>();
+
+ qualityGates.add(new CoverageQualityGate(minimum, Metric.LINE, Baseline.PROJECT, QualityGateCriticality.UNSTABLE));
+
+ CoverageQualityGateEvaluator evaluator = new CoverageQualityGateEvaluator(qualityGates, createStatistics());
+ QualityGateResult result = evaluator.evaluate();
+
+ assertThat(result).hasOverallStatus(QualityGateStatus.WARNING).isNotSuccessful().isNotInactive().hasMessages(
+ String.format(
+ "-> [Overall project - Line Coverage]: ≪Unstable≫ - (Actual value: 50.00%%, Quality gate: %.2f)", minimum));
+ }
+
@Test
void shouldReportUnstableIfLargerThanThreshold() {
Collection<CoverageQualityGate> qualityGates = new ArrayList<>();
@@ -155,7 +202,7 @@ class CoverageQualityGateEvaluatorTest extends AbstractCoverageTest {
}
@Test
- void shouldReportUnstableIfWorseAndSuccessIfBetter2() {
+ void shouldReportUnstableIfWorseAndSuccessIfLargerThanThreshold() {
Collection<CoverageQualityGate> qualityGates = new ArrayList<>();
var minimum = 0; | ['plugin/src/test/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluatorTest.java', 'plugin/src/main/java/io/jenkins/plugins/coverage/metrics/steps/CoverageQualityGateEvaluator.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 797,920 | 160,174 | 21,824 | 142 | 1,908 | 377 | 43 | 1 | 2,749 | 379 | 713 | 52 | 0 | 3 | 1970-01-01T00:28:05 | 106 | Java | {'Java': 1412733, 'JavaScript': 52828, 'HTML': 46388, 'XSLT': 19148, 'CSS': 5275, 'Shell': 1119} | MIT License |
173 | gradle/native-platform/258/259 | gradle | native-platform | https://github.com/gradle/native-platform/issues/259 | https://github.com/gradle/native-platform/pull/258 | https://github.com/gradle/native-platform/pull/258 | 1 | fixes | Stopping the watching causes a `timeout value is negative` exception | When we try to stop the watching, it is currently possible to end up with a negative timeout when trying to join the watcher thread. When this happens, we `join` method throws an `IllegalArgumentException`.
We should that the timeout to join the thread is always positive. This is where we stop watching: https://github.com/gradle/native-platform/blob/master/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java#L220-L232
And here is a stacktrace from a build failure on the Gradle side:
```
Caused by: java.lang.IllegalArgumentException: timeout value is negative
at net.rubygrapefruit.platform.internal.jni.AbstractFileEventFunctions$NativeFileWatcher.awaitTermination(AbstractFileEventFunctions.java:227)
at org.gradle.internal.watch.registry.impl.DefaultFileWatcherRegistry.close(DefaultFileWatcherRegistry.java:198)
at org.gradle.internal.watch.vfs.impl.WatchingVirtualFileSystem.stopWatchingAndInvalidateHierarchy(WatchingVirtualFileSystem.java:357)
at org.gradle.internal.watch.vfs.impl.WatchingVirtualFileSystem.withWatcherChangeErrorHandling(WatchingVirtualFileSystem.java:323)
at org.gradle.internal.watch.vfs.impl.WatchingVirtualFileSystem.withWatcherChangeErrorHandling(WatchingVirtualFileSystem.java:312)
at org.gradle.internal.watch.vfs.impl.WatchingVirtualFileSystem.updateRootNotifyingWatchers(WatchingVirtualFileSystem.java:90)
at org.gradle.internal.watch.vfs.impl.WatchingVirtualFileSystem.lambda$update$0(WatchingVirtualFileSystem.java:81)
at org.gradle.internal.vfs.impl.VfsRootReference.update(VfsRootReference.java:40)
at org.gradle.internal.watch.vfs.impl.WatchingVirtualFileSystem.update(WatchingVirtualFileSystem.java:81)
at org.gradle.internal.vfs.impl.DefaultFileSystemAccess.record(DefaultFileSystemAccess.java:190)
at org.gradle.caching.internal.controller.impl.DefaultBuildCacheCommandFactory$LoadCommand.lambda$snapshotUnpackedData$2(DefaultBuildCacheCommandFactory.java:144)
at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$TaskExecution.visitOutputTrees(ExecuteActionsTaskExecuter.java:345)
at org.gradle.caching.internal.controller.impl.DefaultBuildCacheCommandFactory$LoadCommand.snapshotUnpackedData(DefaultBuildCacheCommandFactory.java:122)
at org.gradle.caching.internal.controller.impl.DefaultBuildCacheCommandFactory$LoadCommand.load(DefaultBuildCacheCommandFactory.java:95)
at org.gradle.caching.internal.controller.DefaultBuildCacheController$Unpack$1.run(DefaultBuildCacheController.java:149)
at org.gradle.internal.operations.DefaultBuildOperationRunner$1.execute(DefaultBuildOperationRunner.java:29)
at org.gradle.internal.operations.DefaultBuildOperationRunner$1.execute(DefaultBuildOperationRunner.java:26)
at org.gradle.internal.operations.DefaultBuildOperationRunner$3.execute(DefaultBuildOperationRunner.java:75)
at org.gradle.internal.operations.DefaultBuildOperationRunner$3.execute(DefaultBuildOperationRunner.java:68)
at org.gradle.internal.operations.DefaultBuildOperationRunner.execute(DefaultBuildOperationRunner.java:153)
at org.gradle.internal.operations.DefaultBuildOperationRunner.execute(DefaultBuildOperationRunner.java:68)
at org.gradle.internal.operations.DefaultBuildOperationRunner.run(DefaultBuildOperationRunner.java:56)
at org.gradle.internal.operations.DefaultBuildOperationExecutor.lambda$run$1(DefaultBuildOperationExecutor.java:71)
at org.gradle.internal.operations.UnmanagedBuildOperationWrapper.runWithUnmanagedSupport(UnmanagedBuildOperationWrapper.java:45)
at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:71)
at org.gradle.caching.internal.controller.DefaultBuildCacheController$Unpack.execute(DefaultBuildCacheController.java:145)
at org.gradle.caching.internal.controller.DefaultBuildCacheController$Unpack.execute(DefaultBuildCacheController.java:134)
at org.gradle.caching.local.internal.DirectoryBuildCacheService.loadInsideLock(DirectoryBuildCacheService.java:117)
at org.gradle.caching.local.internal.DirectoryBuildCacheService.access$200(DirectoryBuildCacheService.java:42)
at org.gradle.caching.local.internal.DirectoryBuildCacheService$1.run(DirectoryBuildCacheService.java:99)
at org.gradle.internal.Factories$1.create(Factories.java:26)
at org.gradle.cache.internal.LockOnDemandCrossProcessCacheAccess.withFileLock(LockOnDemandCrossProcessCacheAccess.java:90)
at org.gradle.cache.internal.DefaultCacheAccess.withFileLock(DefaultCacheAccess.java:196)
at org.gradle.cache.internal.DefaultPersistentDirectoryStore.withFileLock(DefaultPersistentDirectoryStore.java:182)
at org.gradle.cache.internal.DefaultCacheFactory$ReferenceTrackingCache.withFileLock(DefaultCacheFactory.java:206)
at org.gradle.caching.local.internal.DirectoryBuildCacheService.loadLocally(DirectoryBuildCacheService.java:94)
at org.gradle.caching.internal.controller.service.DefaultLocalBuildCacheServiceHandle.load(DefaultLocalBuildCacheServiceHandle.java:49)
at org.gradle.caching.internal.controller.DefaultBuildCacheController.load(DefaultBuildCacheController.java:100)
at org.gradle.internal.execution.steps.CacheStep.lambda$executeWithCache$0(CacheStep.java:78)
at org.gradle.internal.Try.ofFailable(Try.java:39)
``` | 6b26ec90012cc7fc0670c64a72c6274f7dfab8fd | fbaaf84713e3afe9c3a0e4dec08afc5dd671894a | https://github.com/gradle/native-platform/compare/6b26ec90012cc7fc0670c64a72c6274f7dfab8fd...fbaaf84713e3afe9c3a0e4dec08afc5dd671894a | diff --git a/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java b/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java
index b777c80..7d1a10b 100644
--- a/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java
+++ b/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java
@@ -224,7 +224,9 @@ public abstract class AbstractFileEventFunctions implements NativeIntegration {
if (successful) {
long endTime = System.currentTimeMillis();
long remainingTimeout = timeoutInMillis - (endTime - startTime);
- processorThread.join(remainingTimeout);
+ if (remainingTimeout > 0) {
+ processorThread.join(remainingTimeout);
+ }
return !processorThread.isAlive();
} else {
return false; | ['file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 315,073 | 63,199 | 8,953 | 140 | 181 | 28 | 4 | 1 | 5,295 | 155 | 1,068 | 49 | 1 | 1 | 1970-01-01T00:26:45 | 104 | Java | {'Java': 346938, 'C++': 165077, 'Groovy': 162970, 'C': 5237, 'Kotlin': 285} | Apache License 2.0 |
172 | gradle/native-platform/261/260 | gradle | native-platform | https://github.com/gradle/native-platform/issues/260 | https://github.com/gradle/native-platform/pull/261 | https://github.com/gradle/native-platform/pull/261 | 1 | fixes | Report an error when the file watching thread stops | When `executeRunLoop0` throws an error in the file watcher thread, this goes unnoticed and the watching thread stops. That means that the watcher does not look for changes any more, even though it did not notify the client about this. This causes changes to go undetected, as for example shown in this Gradle issue: https://github.com/gradle/gradle/issues/15115. | 02fe1981f03befe54e48824e46c7d9bccb680009 | 2968564cb7a97e822d0ac4a68f087e1427caf3d2 | https://github.com/gradle/native-platform/compare/02fe1981f03befe54e48824e46c7d9bccb680009...2968564cb7a97e822d0ac4a68f087e1427caf3d2 | diff --git a/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java b/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java
index 7d1a10b..7db30b1 100644
--- a/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java
+++ b/file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java
@@ -64,8 +64,9 @@ public abstract class AbstractFileEventFunctions implements NativeIntegration {
* @see FileWatcher#startWatching(Collection)
*/
public FileWatcher start(long startTimeout, TimeUnit startTimeoutUnit) throws InterruptedException, InsufficientResourcesForWatchingException {
- Object server = startWatcher(new NativeFileWatcherCallback(eventQueue));
- return new NativeFileWatcher(server, startTimeout, startTimeoutUnit);
+ NativeFileWatcherCallback callback = new NativeFileWatcherCallback(eventQueue);
+ Object server = startWatcher(callback);
+ return new NativeFileWatcher(server, startTimeout, startTimeoutUnit, callback);
}
protected abstract Object startWatcher(NativeFileWatcherCallback callback);
@@ -158,7 +159,7 @@ public abstract class AbstractFileEventFunctions implements NativeIntegration {
private final Thread processorThread;
private boolean shutdown;
- public NativeFileWatcher(final Object server, long startTimeout, TimeUnit startTimeoutUnit) throws InterruptedException {
+ public NativeFileWatcher(final Object server, long startTimeout, TimeUnit startTimeoutUnit, final NativeFileWatcherCallback callback) throws InterruptedException {
this.server = server;
final CountDownLatch runLoopInitialized = new CountDownLatch(1);
this.processorThread = new Thread("File watcher server") {
@@ -166,7 +167,14 @@ public abstract class AbstractFileEventFunctions implements NativeIntegration {
public void run() {
initializeRunLoop0(server);
runLoopInitialized.countDown();
- executeRunLoop0(server);
+ try {
+ executeRunLoop0(server);
+ if (!shutdown) {
+ callback.reportFailure(new FileWatcherException("File watcher server did exit without being shutdown"));
+ }
+ } catch (Throwable e) {
+ callback.reportFailure(e);
+ }
}
};
this.processorThread.setDaemon(true); | ['file-events/src/main/java/net/rubygrapefruit/platform/internal/jni/AbstractFileEventFunctions.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 315,139 | 63,211 | 8,955 | 140 | 1,157 | 175 | 16 | 1 | 362 | 55 | 82 | 1 | 1 | 0 | 1970-01-01T00:26:45 | 104 | Java | {'Java': 346938, 'C++': 165077, 'Groovy': 162970, 'C': 5237, 'Kotlin': 285} | Apache License 2.0 |
1,500 | hashgraph/hedera-mirror-node/234/195 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/195 | https://github.com/hashgraph/hedera-mirror-node/pull/234 | https://github.com/hashgraph/hedera-mirror-node/pull/234 | 1 | fixes | Record file hash mismatch with previous | **Detailed Description**
`File Hash Mismatch with previous` error still occurs even after the fix for #157.
**Actual Behavior**
1. Run record downloader
2. Run record parser
Wait until hash mismatch occurs.
**Expected Behavior**
Records download, verified and moved to valid with no errors.
**Environment:**
- Environment: staging
- Java: OpenJDK 11.0.4
- Version: 0.1.0
**Additional Context**
Logs when it happened (reverse order):
```console
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,896 WARN [main ] c.h.d.RecordFileDownloader File Hash Mismatch with previous : /var/lib/mirror-node/recordstreams/valid/2019-09-03T20_43_40.116069Z.rcd
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,896 TRACE [main ] c.h.d.ApplicationStatus Returning application status for : RECORD_HASH_MISMATCH_BYPASS_UNTIL_AFTER,
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,895 TRACE [main ] c.h.d.ApplicationStatus Getting application status for : RECORD_HASH_MISMATCH_BYPASS_UNTIL_AFTER
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,894 TRACE [main ] c.h.p.RecordFileParser Read previous file hash a092fd020847f6b2206b29e7d50283c0f820e4244691eed75e4b19c301d7c97c0a7f5d1f0a24c227c84e6ee67b06b033 for file /var/lib/mirror-node/recordstreams/valid/2019-09-03T20_43_40.116069Z.rcd
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,893 TRACE [main ] c.h.d.ApplicationStatus Returning application status for : LAST_VALID_DOWNLOADED_RECORD_FILE_HASH, d8b70aabf07659c3fd05492f3c27b50539e6535a51e6c3e3f747ae3a26a388ddc4eb47e2fffcbc7ff33fcbe436bc1573
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,893 TRACE [main ] c.h.d.ApplicationStatus Returning application status for : LAST_VALID_DOWNLOADED_RECORD_FILE, 2019-09-03T20_42_00.092453Z.rcd
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,893 DEBUG [main ] c.h.d.RecordFileDownloader Verified signature file matches at least 2/3 of nodes: 2019-09-03T20_43_40.116069Z.rcd_sig
```
| 9a70800fd04d45f2183b89c83b92e3fde6bba115 | e6ec5769bd26f1aa65af6125be64418998324015 | https://github.com/hashgraph/hedera-mirror-node/compare/9a70800fd04d45f2183b89c83b92e3fde6bba115...e6ec5769bd26f1aa65af6125be64418998324015 | diff --git a/src/main/java/com/hedera/downloader/RecordFileDownloader.java b/src/main/java/com/hedera/downloader/RecordFileDownloader.java
index 0470d34b2..5c7e570e6 100644
--- a/src/main/java/com/hedera/downloader/RecordFileDownloader.java
+++ b/src/main/java/com/hedera/downloader/RecordFileDownloader.java
@@ -28,6 +28,7 @@ import com.hedera.utilities.Utility;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import java.io.File;
@@ -39,6 +40,7 @@ import java.util.stream.Stream;
@Log4j2
public class RecordFileDownloader extends Downloader {
+ private static final String EMPTY_HASH = Hex.encodeHexString(new byte[48]);
private final String validDir = ConfigLoader.getDefaultParseDir(OPERATION_TYPE.RECORDS);
private final String tmpDir = ConfigLoader.getDefaultTmpDir(OPERATION_TYPE.RECORDS);
@@ -65,7 +67,6 @@ public class RecordFileDownloader extends Downloader {
// Verify signature files and download .rcd files of valid signature files
verifySigsAndDownloadRecordFiles(sigFilesMap);
- verifyValidRecordFiles(validDir);
} catch (Exception e) {
log.error("Error downloading and verifying new record files", e);
}
@@ -73,56 +74,28 @@ public class RecordFileDownloader extends Downloader {
/**
* Verify the .rcd files to see if the file Hash matches prevFileHash
- * @param fileToCheck
* @throws Exception
*/
- private void verifyValidRecordFiles(String validDir) throws Exception {
- String lastValidRcdFileName = applicationStatus.getLastValidDownloadedRecordFileName();
- String lastValidRcdFileHash = applicationStatus.getLastValidDownloadedRecordFileHash();
-
- try (Stream<Path> pathStream = Files.walk(Paths.get(validDir))) {
- List<String> fileNames = pathStream.filter(p -> Utility.isRecordFile(p.toString()))
- .filter(p -> lastValidRcdFileName.isEmpty() ||
- fileNameComparator.compare(p.toFile().getName(), lastValidRcdFileName) > 0)
- .sorted(pathComparator)
- .map(p -> p.toString()).collect(Collectors.toList());
-
- String newLastValidRcdFileName = lastValidRcdFileName;
- String newLastValidRcdFileHash = lastValidRcdFileHash;
-
- for (String rcdName : fileNames) {
- if (Utility.checkStopFile()) {
- log.info("Stop file found, stopping");
- break;
- }
- String prevFileHash = RecordFileParser.readPrevFileHash(rcdName);
- if (prevFileHash == null) {
- log.warn("Doesn't contain valid prevFileHash: {}", rcdName);
- break;
- }
- if (newLastValidRcdFileHash.isEmpty() ||
- newLastValidRcdFileHash.equals(prevFileHash) ||
- prevFileHash.equals(Hex.encodeHexString(new byte[48]))) {
- newLastValidRcdFileHash = Utility.bytesToHex(Utility.getFileHash(rcdName));
- newLastValidRcdFileName = new File(rcdName).getName();
- } else if (applicationStatus.getBypassRecordHashMismatchUntilAfter().compareTo(new File(rcdName).getName()) > 0) {
- newLastValidRcdFileName = new File(rcdName).getName();
- newLastValidRcdFileHash = Utility.bytesToHex(Utility.getFileHash(rcdName));
- } else {
- log.warn("File Hash Mismatch with previous : {}", rcdName);
- break;
- }
- }
-
- if (!newLastValidRcdFileName.equals(lastValidRcdFileName)) {
- applicationStatus.updateLastValidDownloadedRecordFileHash(newLastValidRcdFileHash);
- applicationStatus.updateLastValidDownloadedRecordFileName(newLastValidRcdFileName);
- }
+ private boolean verifyHashChain(File recordFile) throws Exception {
+ String recordPath = recordFile.getAbsolutePath();
+ String lastValidRecordFileHash = applicationStatus.getLastValidDownloadedRecordFileHash();
+ String bypassMismatch = StringUtils.defaultIfBlank(applicationStatus.getBypassRecordHashMismatchUntilAfter(), "");
+ String prevFileHash = RecordFileParser.readPrevFileHash(recordPath);
+
+ if (prevFileHash == null) {
+ log.warn("Doesn't contain valid previous file hash: {}", recordPath);
+ return false;
+ }
- } catch (Exception ex) {
- log.error("Failed to verify record files in {}", validDir, ex);
+ if (StringUtils.isBlank(lastValidRecordFileHash) || lastValidRecordFileHash.equals(prevFileHash) ||
+ EMPTY_HASH.equals(prevFileHash) || bypassMismatch.compareTo(recordFile.getName()) > 0) {
+ return true;
}
+
+ log.warn("File Hash Mismatch with previous: {}, expected {}, got {}", recordFile.getName(), lastValidRecordFileHash, prevFileHash);
+ return false;
}
+
/**
* For each group of signature Files with the same file name:
* (1) verify that the signature files are signed by corresponding node's PublicKey;
@@ -147,7 +120,7 @@ public class RecordFileDownloader extends Downloader {
}
boolean valid = false;
List<File> sigFiles = sigFilesMap.get(fileName);
-
+
// If the number of sigFiles is not greater than 2/3 of number of nodes, we don't need to verify them
if (sigFiles == null || !Utility.greaterThanSuperMajorityNum(sigFiles.size(), nodeAccountIds.size())) {
log.warn("Signature file count for {} does not exceed 2/3 of nodes", fileName);
@@ -162,18 +135,29 @@ public class RecordFileDownloader extends Downloader {
break;
}
- Pair<Boolean, File> rcdFileResult = downloadFile(DownloadType.RCD, validSigFile, tmpDir);
- File rcdFile = rcdFileResult.getRight();
- if (rcdFile != null && Utility.hashMatch(validSigFile, rcdFile)) {
- // move the file to the valid directory
- File fTo = new File(validDir + "/" + rcdFile.getName());
- if (moveFile(rcdFile, fTo)) {
- log.debug("Verified signature file matches at least 2/3 of nodes: {}", fileName);
- valid = true;
- break;
+ try {
+ Pair<Boolean, File> rcdFileResult = downloadFile(DownloadType.RCD, validSigFile, tmpDir);
+ File rcdFile = rcdFileResult.getRight();
+ if (rcdFile != null && Utility.hashMatch(validSigFile, rcdFile)) {
+ if (verifyHashChain(rcdFile)) {
+ // move the file to the valid directory
+ String name = rcdFile.getName();
+ String hash = Utility.bytesToHex(Utility.getFileHash(rcdFile.getAbsolutePath()));
+ File validFile = Paths.get(validDir, name).toFile();
+
+ if (moveFile(rcdFile, validFile)) {
+ log.debug("Verified signature file matches at least 2/3 of nodes: {}", fileName);
+ applicationStatus.updateLastValidDownloadedRecordFileHash(hash);
+ applicationStatus.updateLastValidDownloadedRecordFileName(name);
+ valid = true;
+ break;
+ }
+ }
+ } else if (rcdFile != null) {
+ log.warn("Hash of {} doesn't match the hash contained in the signature file. Will try to download a record file with same timestamp from other nodes", validSigFile);
}
- } else if (rcdFile != null) {
- log.warn("Hash of {} doesn't match the hash contained in the signature file. Will try to download a record file with same timestamp from other nodes", rcdFile);
+ } catch (Exception e) {
+ log.error("Unable to verify signature {}", validSigFile, e);
}
}
diff --git a/src/test/java/com/hedera/downloader/RecordFileDownloaderTestV1.java b/src/test/java/com/hedera/downloader/RecordFileDownloaderTestV1.java
index 263d61bb4..781d1fe70 100644
--- a/src/test/java/com/hedera/downloader/RecordFileDownloaderTestV1.java
+++ b/src/test/java/com/hedera/downloader/RecordFileDownloaderTestV1.java
@@ -72,9 +72,6 @@ public class RecordFileDownloaderTestV1 {
s3 = S3Mock.create(8001, s3Path.toString());
s3.start();
-
- when(applicationStatus.getLastValidDownloadedRecordFileName()).thenReturn("");
- when(applicationStatus.getLastValidDownloadedRecordFileHash()).thenReturn("");
}
@AfterEach
@@ -163,13 +160,11 @@ public class RecordFileDownloaderTestV1 {
void hashMismatchWithPrevious() throws Exception {
when(applicationStatus.getLastValidDownloadedRecordFileName()).thenReturn("2019-07-01T14:12:00.000000Z.rcd");
when(applicationStatus.getLastValidDownloadedRecordFileHash()).thenReturn("123");
- when(applicationStatus.getBypassRecordHashMismatchUntilAfter()).thenReturn("");
fileCopier.copy();
downloader.download();
assertThat(Files.walk(validPath))
.filteredOn(p -> !p.toFile().isDirectory())
- .hasSize(2)
- .allMatch(p -> Utility.isRecordFile(p.toString()));
+ .hasSize(0);
}
@Test
diff --git a/src/test/java/com/hedera/downloader/RecordFileDownloaderTestV2.java b/src/test/java/com/hedera/downloader/RecordFileDownloaderTestV2.java
index 39bbf0974..5576a3811 100644
--- a/src/test/java/com/hedera/downloader/RecordFileDownloaderTestV2.java
+++ b/src/test/java/com/hedera/downloader/RecordFileDownloaderTestV2.java
@@ -72,9 +72,6 @@ public class RecordFileDownloaderTestV2 {
s3 = S3Mock.create(8001, s3Path.toString());
s3.start();
-
- when(applicationStatus.getLastValidDownloadedRecordFileName()).thenReturn("");
- when(applicationStatus.getLastValidDownloadedRecordFileHash()).thenReturn("");
}
@AfterEach | ['src/test/java/com/hedera/downloader/RecordFileDownloaderTestV1.java', 'src/main/java/com/hedera/downloader/RecordFileDownloader.java', 'src/test/java/com/hedera/downloader/RecordFileDownloaderTestV2.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 263,942 | 61,198 | 7,599 | 32 | 5,063 | 1,209 | 100 | 1 | 2,032 | 192 | 724 | 29 | 0 | 1 | 1970-01-01T00:26:08 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,499 | hashgraph/hedera-mirror-node/241/195 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/195 | https://github.com/hashgraph/hedera-mirror-node/pull/241 | https://github.com/hashgraph/hedera-mirror-node/pull/241 | 1 | fixes | Record file hash mismatch with previous | **Detailed Description**
`File Hash Mismatch with previous` error still occurs even after the fix for #157.
**Actual Behavior**
1. Run record downloader
2. Run record parser
Wait until hash mismatch occurs.
**Expected Behavior**
Records download, verified and moved to valid with no errors.
**Environment:**
- Environment: staging
- Java: OpenJDK 11.0.4
- Version: 0.1.0
**Additional Context**
Logs when it happened (reverse order):
```console
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,896 WARN [main ] c.h.d.RecordFileDownloader File Hash Mismatch with previous : /var/lib/mirror-node/recordstreams/valid/2019-09-03T20_43_40.116069Z.rcd
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,896 TRACE [main ] c.h.d.ApplicationStatus Returning application status for : RECORD_HASH_MISMATCH_BYPASS_UNTIL_AFTER,
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,895 TRACE [main ] c.h.d.ApplicationStatus Getting application status for : RECORD_HASH_MISMATCH_BYPASS_UNTIL_AFTER
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,894 TRACE [main ] c.h.p.RecordFileParser Read previous file hash a092fd020847f6b2206b29e7d50283c0f820e4244691eed75e4b19c301d7c97c0a7f5d1f0a24c227c84e6ee67b06b033 for file /var/lib/mirror-node/recordstreams/valid/2019-09-03T20_43_40.116069Z.rcd
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,893 TRACE [main ] c.h.d.ApplicationStatus Returning application status for : LAST_VALID_DOWNLOADED_RECORD_FILE_HASH, d8b70aabf07659c3fd05492f3c27b50539e6535a51e6c3e3f747ae3a26a388ddc4eb47e2fffcbc7ff33fcbe436bc1573
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,893 TRACE [main ] c.h.d.ApplicationStatus Returning application status for : LAST_VALID_DOWNLOADED_RECORD_FILE, 2019-09-03T20_42_00.092453Z.rcd
Sep 03 22:00:36 psql-test-02 java[8100]: 2019-09-03 22:00:36,893 DEBUG [main ] c.h.d.RecordFileDownloader Verified signature file matches at least 2/3 of nodes: 2019-09-03T20_43_40.116069Z.rcd_sig
```
| 95e521ed47e6be33861b2798a9f0334b7c00fa6c | 1c8a093b8066b38ee92d6fd8300d1b942073d1d2 | https://github.com/hashgraph/hedera-mirror-node/compare/95e521ed47e6be33861b2798a9f0334b7c00fa6c...1c8a093b8066b38ee92d6fd8300d1b942073d1d2 | diff --git a/src/main/java/com/hedera/downloader/RecordFileDownloader.java b/src/main/java/com/hedera/downloader/RecordFileDownloader.java
index cf5320e2d..8a6cca292 100644
--- a/src/main/java/com/hedera/downloader/RecordFileDownloader.java
+++ b/src/main/java/com/hedera/downloader/RecordFileDownloader.java
@@ -29,6 +29,7 @@ import com.hedera.utilities.Utility;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.scheduling.annotation.Scheduled;
@@ -43,6 +44,7 @@ import java.util.stream.Stream;
@Named
public class RecordFileDownloader extends Downloader {
+ private static final String EMPTY_HASH = Hex.encodeHexString(new byte[48]);
private final String validDir = ConfigLoader.getDefaultParseDir(OPERATION_TYPE.RECORDS);
private final String tmpDir = ConfigLoader.getDefaultTmpDir(OPERATION_TYPE.RECORDS);
@@ -68,10 +70,7 @@ public class RecordFileDownloader extends Downloader {
}
Map<String, List<File>> sigFilesMap = downloadSigFiles(DownloadType.RCD);
-
- // Verify signature files and download .rcd files of valid signature files
verifySigsAndDownloadRecordFiles(sigFilesMap);
- verifyValidRecordFiles(validDir);
} catch (Exception e) {
log.error("Error downloading and verifying new record files", e);
}
@@ -79,56 +78,28 @@ public class RecordFileDownloader extends Downloader {
/**
* Verify the .rcd files to see if the file Hash matches prevFileHash
- * @param fileToCheck
- * @throws Exception
+ * @throws Exception
*/
- private void verifyValidRecordFiles(String validDir) throws Exception {
- String lastValidRcdFileName = applicationStatus.getLastValidDownloadedRecordFileName();
- String lastValidRcdFileHash = applicationStatus.getLastValidDownloadedRecordFileHash();
-
- try (Stream<Path> pathStream = Files.walk(Paths.get(validDir))) {
- List<String> fileNames = pathStream.filter(p -> Utility.isRecordFile(p.toString()))
- .filter(p -> lastValidRcdFileName.isEmpty() ||
- fileNameComparator.compare(p.toFile().getName(), lastValidRcdFileName) > 0)
- .sorted(pathComparator)
- .map(p -> p.toString()).collect(Collectors.toList());
-
- String newLastValidRcdFileName = lastValidRcdFileName;
- String newLastValidRcdFileHash = lastValidRcdFileHash;
-
- for (String rcdName : fileNames) {
- if (Utility.checkStopFile()) {
- log.info("Stop file found, stopping");
- break;
- }
- String prevFileHash = RecordFileParser.readPrevFileHash(rcdName);
- if (prevFileHash == null) {
- log.warn("Doesn't contain valid prevFileHash: {}", rcdName);
- break;
- }
- if (newLastValidRcdFileHash.isEmpty() ||
- newLastValidRcdFileHash.equals(prevFileHash) ||
- prevFileHash.equals(Hex.encodeHexString(new byte[48]))) {
- newLastValidRcdFileHash = Utility.bytesToHex(Utility.getFileHash(rcdName));
- newLastValidRcdFileName = new File(rcdName).getName();
- } else if (applicationStatus.getBypassRecordHashMismatchUntilAfter().compareTo(new File(rcdName).getName()) > 0) {
- newLastValidRcdFileName = new File(rcdName).getName();
- newLastValidRcdFileHash = Utility.bytesToHex(Utility.getFileHash(rcdName));
- } else {
- log.warn("File Hash Mismatch with previous : {}", rcdName);
- break;
- }
- }
-
- if (!newLastValidRcdFileName.equals(lastValidRcdFileName)) {
- applicationStatus.updateLastValidDownloadedRecordFileHash(newLastValidRcdFileHash);
- applicationStatus.updateLastValidDownloadedRecordFileName(newLastValidRcdFileName);
- }
+ private boolean verifyHashChain(File recordFile) throws Exception {
+ String recordPath = recordFile.getAbsolutePath();
+ String lastValidRecordFileHash = applicationStatus.getLastValidDownloadedRecordFileHash();
+ String bypassMismatch = StringUtils.defaultIfBlank(applicationStatus.getBypassRecordHashMismatchUntilAfter(), "");
+ String prevFileHash = RecordFileParser.readPrevFileHash(recordPath);
+
+ if (prevFileHash == null) {
+ log.warn("Doesn't contain valid previous file hash: {}", recordPath);
+ return false;
+ }
- } catch (Exception ex) {
- log.error("Failed to verify record files in {}", validDir, ex);
+ if (StringUtils.isBlank(lastValidRecordFileHash) || lastValidRecordFileHash.equals(prevFileHash) ||
+ EMPTY_HASH.equals(prevFileHash) || bypassMismatch.compareTo(recordFile.getName()) > 0) {
+ return true;
}
+
+ log.warn("File Hash Mismatch with previous: {}, expected {}, got {}", recordFile.getName(), lastValidRecordFileHash, prevFileHash);
+ return false;
}
+
/**
* For each group of signature Files with the same file name:
* (1) verify that the signature files are signed by corresponding node's PublicKey;
@@ -153,7 +124,7 @@ public class RecordFileDownloader extends Downloader {
}
boolean valid = false;
List<File> sigFiles = sigFilesMap.get(fileName);
-
+
// If the number of sigFiles is not greater than 2/3 of number of nodes, we don't need to verify them
if (sigFiles == null || !Utility.greaterThanSuperMajorityNum(sigFiles.size(), nodeAccountIds.size())) {
log.warn("Signature file count for {} does not exceed 2/3 of nodes", fileName);
@@ -168,18 +139,29 @@ public class RecordFileDownloader extends Downloader {
break;
}
- Pair<Boolean, File> rcdFileResult = downloadFile(DownloadType.RCD, validSigFile, tmpDir);
- File rcdFile = rcdFileResult.getRight();
- if (rcdFile != null && Utility.hashMatch(validSigFile, rcdFile)) {
- // move the file to the valid directory
- File fTo = new File(validDir + "/" + rcdFile.getName());
- if (moveFile(rcdFile, fTo)) {
- log.debug("Verified signature file matches at least 2/3 of nodes: {}", fileName);
- valid = true;
- break;
+ try {
+ Pair<Boolean, File> rcdFileResult = downloadFile(DownloadType.RCD, validSigFile, tmpDir);
+ File rcdFile = rcdFileResult.getRight();
+ if (rcdFile != null && Utility.hashMatch(validSigFile, rcdFile)) {
+ if (verifyHashChain(rcdFile)) {
+ // move the file to the valid directory
+ String name = rcdFile.getName();
+ String hash = Utility.bytesToHex(Utility.getFileHash(rcdFile.getAbsolutePath()));
+ File validFile = Paths.get(validDir, name).toFile();
+
+ if (moveFile(rcdFile, validFile)) {
+ log.debug("Verified signature file matches at least 2/3 of nodes: {}", fileName);
+ applicationStatus.updateLastValidDownloadedRecordFileHash(hash);
+ applicationStatus.updateLastValidDownloadedRecordFileName(name);
+ valid = true;
+ break;
+ }
+ }
+ } else if (rcdFile != null) {
+ log.warn("Hash of {} doesn't match the hash contained in the signature file. Will try to download a record file with same timestamp from other nodes", rcdFile);
}
- } else if (rcdFile != null) {
- log.warn("Hash of {} doesn't match the hash contained in the signature file. Will try to download a record file with same timestamp from other nodes", rcdFile);
+ } catch (Exception e) {
+ log.error("Unable to verify signature {}", validSigFile, e);
}
}
diff --git a/src/test/java/com/hedera/downloader/RecordFileDownloaderTest.java b/src/test/java/com/hedera/downloader/RecordFileDownloaderTest.java
index d44195c88..b8700eba5 100644
--- a/src/test/java/com/hedera/downloader/RecordFileDownloaderTest.java
+++ b/src/test/java/com/hedera/downloader/RecordFileDownloaderTest.java
@@ -72,9 +72,6 @@ public class RecordFileDownloaderTest {
s3 = S3Mock.create(8001, s3Path.toString());
s3.start();
-
- when(applicationStatus.getLastValidDownloadedRecordFileName()).thenReturn("");
- when(applicationStatus.getLastValidDownloadedRecordFileHash()).thenReturn("");
}
@AfterEach
@@ -190,15 +187,11 @@ public class RecordFileDownloaderTest {
final String filename = "2019-08-30T18_10_05.249678Z.rcd";
when(applicationStatus.getLastValidDownloadedRecordFileName()).thenReturn("2019-07-01T14:12:00.000000Z.rcd");
when(applicationStatus.getLastValidDownloadedRecordFileHash()).thenReturn("123");
- when(applicationStatus.getBypassRecordHashMismatchUntilAfter()).thenReturn("");
fileCopier.filterFiles(filename + "*").copy(); // Skip first file with zero hash
downloader.download();
assertThat(Files.walk(validPath))
.filteredOn(p -> !p.toFile().isDirectory())
- .hasSize(1)
- .allMatch(p -> Utility.isRecordFile(p.toString()))
- .extracting(Path::getFileName)
- .contains(Paths.get(filename));
+ .hasSize(0);
}
@Test
@@ -207,7 +200,7 @@ public class RecordFileDownloaderTest {
final String filename = "2019-08-30T18_10_05.249678Z.rcd";
when(applicationStatus.getLastValidDownloadedRecordFileName()).thenReturn("2019-07-01T14:12:00.000000Z.rcd");
when(applicationStatus.getLastValidDownloadedRecordFileHash()).thenReturn("123");
- when(applicationStatus.getBypassRecordHashMismatchUntilAfter()).thenReturn("2019-07-02T00:00:00.000000Z.rcd");
+ when(applicationStatus.getBypassRecordHashMismatchUntilAfter()).thenReturn("2019-09-01T00:00:00.000000Z.rcd");
fileCopier.filterFiles(filename + "*").copy(); // Skip first file with zero hash
downloader.download();
assertThat(Files.walk(validPath)) | ['src/main/java/com/hedera/downloader/RecordFileDownloader.java', 'src/test/java/com/hedera/downloader/RecordFileDownloaderTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 251,836 | 58,166 | 7,231 | 34 | 5,186 | 1,239 | 104 | 1 | 2,032 | 192 | 724 | 29 | 0 | 1 | 1970-01-01T00:26:08 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,506 | hashgraph/hedera-mirror-node/48/42 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/42 | https://github.com/hashgraph/hedera-mirror-node/pull/48 | https://github.com/hashgraph/hedera-mirror-node/pull/48 | 1 | fixes | Crash on invalid account balance csv | Actual:
```console
2019-08-15 15:10:37,094 INFO [main ] configloader Loading configuration from ./config/config.json
2019-08-15 15:11:17,133 INFO [main ] balancelogger No balance file to parse found
2019-08-15 15:11:17,133 INFO [main ] balancelogger Last Balance processing done
2019-08-15 15:11:17,133 INFO [main ] balancelogger Balance History processing done
2019-08-15 15:12:17,209 ERROR [main ] balancelogger File ./MirrorNodeData/accountBalances/valid/foo.csv is not named as expected, should be like 2019-06-28-22-05.csv
2019-08-15 15:12:17,221 INFO [main ] balancelogger Last Balance processing done
2019-08-15 15:12:17,222 ERROR [main ] balancelogger File ./MirrorNodeData/accountBalances/valid/foo.csv is not named as expected, should be like 2019-06-28-22-05.csv
2019-08-15 15:12:17,223 ERROR [main ] filewatcher Exception : {}
java.lang.StringIndexOutOfBoundsException: begin 0, end 10, length 7
at java.lang.String.checkBoundsBeginEnd(String.java:3319) ~[?:?]
at java.lang.String.substring(String.java:1874) ~[?:?]
at com.hedera.utilities.Utility.moveFileToParsedDir(Utility.java:499) ~[classes/:?]
at com.hedera.balanceFileLogger.BalanceFileLogger.processAllFilesForHistory(BalanceFileLogger.java:162) ~[classes/:?]
at com.hedera.balanceFileLogger.BalanceFileLogger.onCreate(BalanceFileLogger.java:149) ~[classes/:?]
at com.hedera.fileWatcher.FileWatcher.watch(FileWatcher.java:61) [classes/:?]
at com.hedera.balanceFileLogger.BalanceFileLogger.main(BalanceFileLogger.java:143) [classes/:?]
```
Expected:
Reject invalid file and move on | d1acd560b9547257b96b888e6fbca33931150009 | c33a1e1ba87838e80f7f65638433484432676cc2 | https://github.com/hashgraph/hedera-mirror-node/compare/d1acd560b9547257b96b888e6fbca33931150009...c33a1e1ba87838e80f7f65638433484432676cc2 | diff --git a/src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java b/src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java
index 0406598ed..3a5acffdc 100644
--- a/src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java
+++ b/src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java
@@ -83,7 +83,7 @@ public class BalanceFileLogger extends FileWatcher {
super(pathToWatch);
}
- static void parseFileName(File fileName) {
+ static boolean parseFileName(File fileName) {
String shortFileName = fileName.getName().replace(".csv", "");
if (shortFileName.contains("_Balances")) {
@@ -98,6 +98,7 @@ public class BalanceFileLogger extends FileWatcher {
String[] fileParts = shortFileName.split("-");
if (fileParts.length != 5) {
log.error(MARKER, "File {} is not named as expected, should be like 2019-06-28-22-05.csv", fileName);
+ return false;
} else {
Calendar c = Calendar.getInstance();
c.clear();
@@ -114,6 +115,7 @@ public class BalanceFileLogger extends FileWatcher {
fileNanos = fileTimestamp.getNano();
}
}
+ return true;
}
private static File getLatestBalancefile() throws IOException {
@@ -187,7 +189,9 @@ public class BalanceFileLogger extends FileWatcher {
return false;
}
- parseFileName(balanceFile);
+ if ( ! parseFileName(balanceFile)) {
+ return false;
+ }
PreparedStatement selectBalance = connect.prepareStatement(
"SELECT id"
@@ -314,7 +318,9 @@ public class BalanceFileLogger extends FileWatcher {
// process the file
connect = DatabaseUtilities.openDatabase(connect);
- parseFileName(balanceFile);
+ if ( ! parseFileName(balanceFile)) {
+ return;
+ }
if (connect != null) {
connect.setAutoCommit(false); | ['src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 267,059 | 61,513 | 7,531 | 30 | 421 | 67 | 12 | 1 | 1,592 | 133 | 484 | 22 | 0 | 1 | 1970-01-01T00:26:06 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,498 | hashgraph/hedera-mirror-node/286/275 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/275 | https://github.com/hashgraph/hedera-mirror-node/pull/286 | https://github.com/hashgraph/hedera-mirror-node/pull/286 | 1 | fixes | Log AWS error messages on ListObjects failures | **Detailed Description**
During upgrade, there were no files being downloaded, due to SSL/ca-cert errors that were generated at debug-level for the AWS code, but nothing was logged in the mirror-node.
The AWS error was a "trustanchors parameter must be non-empty" error.
**Actual Behavior**
**Expected Behavior**
In the event of these errors - the mirror-node should log an error level message.
**Environment:**
- Node: v0.2.0
| 0deb5e572e0a3b64d4124045ac1c43264531d8df | 1a12a0bbdcd31f3a9f183aaefd80f638a01ad4c9 | https://github.com/hashgraph/hedera-mirror-node/compare/0deb5e572e0a3b64d4124045ac1c43264531d8df...1a12a0bbdcd31f3a9f183aaefd80f638a01ad4c9 | diff --git a/src/main/java/com/hedera/configLoader/ConfigLoader.java b/src/main/java/com/hedera/configLoader/ConfigLoader.java
index 2ba652bf0..4297ab865 100644
--- a/src/main/java/com/hedera/configLoader/ConfigLoader.java
+++ b/src/main/java/com/hedera/configLoader/ConfigLoader.java
@@ -28,6 +28,7 @@ import com.google.gson.JsonSyntaxException;
import com.hedera.utilities.Utility;
import io.github.cdimascio.dotenv.Dotenv;
+import lombok.*;
import lombok.extern.log4j.Log4j2;
import java.io.*;
@@ -35,10 +36,14 @@ import java.io.*;
@Log4j2
public class ConfigLoader {
+ @Getter
+ @RequiredArgsConstructor
public static enum CLOUD_PROVIDER {
- S3,
- GCP,
- LOCAL; // Testing
+ S3("https://s3.amazonaws.com"),
+ GCP("https://storage.googleapis.com"),
+ LOCAL("http://127.0.0.1:8001"); // Testing
+
+ private final String endpoint;
}
// cloud provider, must be either S3 or GCP
diff --git a/src/main/java/com/hedera/downloader/Downloader.java b/src/main/java/com/hedera/downloader/Downloader.java
index e4fd686eb..bae8f0a31 100644
--- a/src/main/java/com/hedera/downloader/Downloader.java
+++ b/src/main/java/com/hedera/downloader/Downloader.java
@@ -20,13 +20,13 @@ package com.hedera.downloader;
*
*/
-import com.amazonaws.ClientConfiguration;
+import com.amazonaws.*;
import com.amazonaws.auth.*;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.client.builder.ExecutorFactory;
+import com.amazonaws.handlers.RequestHandler2;
import com.amazonaws.retry.PredefinedRetryPolicies;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import com.amazonaws.services.s3.*;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;
@@ -46,6 +46,7 @@ import com.hedera.utilities.Utility;
import com.hederahashgraph.api.proto.java.NodeAddress;
import com.hederahashgraph.api.proto.java.NodeAddressBook;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -78,7 +79,6 @@ public abstract class Downloader {
protected static String bucketName;
protected static TransferManager xfer_mgr;
- protected static DownloaderProperties downloaderProperties;
protected static AmazonS3 s3Client;
@@ -90,8 +90,6 @@ public abstract class Downloader {
protected List<String> nodeAccountIds;
- protected static ClientConfiguration clientConfiguration;
-
protected final ApplicationStatusRepository applicationStatusRepository;
private final CommonDownloaderProperties commonProps;
@@ -373,74 +371,46 @@ public abstract class Downloader {
}
}
- protected static synchronized void setupCloudConnection(DownloaderProperties dlProps) {
+ protected static synchronized void setupCloudConnection(DownloaderProperties downloaderProperties) {
if (xfer_mgr != null) {
return;
}
- downloaderProperties = dlProps;
bucketName = ConfigLoader.getBucketName();
// Define retryPolicy
- clientConfiguration = new ClientConfiguration();
+ ClientConfiguration clientConfiguration = new ClientConfiguration();
clientConfiguration.setRetryPolicy(
PredefinedRetryPolicies.getDefaultRetryPolicyWithCustomMaxRetries(5));
clientConfiguration.setMaxConnections(downloaderProperties.getMaxConnections());
- if (ConfigLoader.getCloudProvider() == CLOUD_PROVIDER.S3) {
- if (ConfigLoader.getAccessKey().contentEquals("")) {
- s3Client = AmazonS3ClientBuilder.standard()
- .withRegion(ConfigLoader.getClientRegion())
- .withClientConfiguration(clientConfiguration)
- .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
- .build();
- } else {
- s3Client = AmazonS3ClientBuilder.standard()
- .withCredentials(new AWSStaticCredentialsProvider(
- new BasicAWSCredentials(ConfigLoader.getAccessKey(),
- ConfigLoader.getSecretKey())))
- .withRegion(ConfigLoader.getClientRegion())
- .withClientConfiguration(clientConfiguration)
- .build();
- }
- } else if (ConfigLoader.getCloudProvider() == CLOUD_PROVIDER.GCP) {
- if (ConfigLoader.getAccessKey().contentEquals("")) {
- s3Client = AmazonS3ClientBuilder.standard()
- .withEndpointConfiguration(
- new AwsClientBuilder.EndpointConfiguration(
- "https://storage.googleapis.com", ConfigLoader.getClientRegion()))
- .withClientConfiguration(clientConfiguration)
- .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
- .build();
- } else {
- s3Client = AmazonS3ClientBuilder.standard()
- .withEndpointConfiguration(
- new AwsClientBuilder.EndpointConfiguration(
- "https://storage.googleapis.com", ConfigLoader.getClientRegion()))
- .withCredentials(new AWSStaticCredentialsProvider(
- new BasicAWSCredentials(ConfigLoader.getAccessKey(),
- ConfigLoader.getSecretKey())))
- .withClientConfiguration(clientConfiguration)
- .build();
+ RequestHandler2 errorHandler = new RequestHandler2() {
+ private Logger logger = LogManager.getLogger(AmazonS3Client.class);
+
+ @Override
+ public void afterError(Request<?> request, Response<?> response, Exception e) {
+ logger.error("Error calling {} {}", request.getHttpMethod(), request.getEndpoint(), e);
}
- } else {
- s3Client = AmazonS3ClientBuilder.standard()
- .withPathStyleAccessEnabled(true)
- .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration("http://localhost:8001", ConfigLoader.getClientRegion()))
- .withClientConfiguration(clientConfiguration)
- .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
- .build();
+ };
+
+ AWSCredentials awsCredentials = new AnonymousAWSCredentials();
+ if (StringUtils.isNotBlank(ConfigLoader.getAccessKey()) && StringUtils.isNotBlank(ConfigLoader.getSecretKey())) {
+ awsCredentials = new BasicAWSCredentials(ConfigLoader.getAccessKey(), ConfigLoader.getSecretKey());
}
+
+ s3Client = AmazonS3ClientBuilder.standard()
+ .withClientConfiguration(clientConfiguration)
+ .withCredentials(new AWSStaticCredentialsProvider(awsCredentials))
+ .withEndpointConfiguration(
+ new AwsClientBuilder.EndpointConfiguration(ConfigLoader.getCloudProvider().getEndpoint(), ConfigLoader.getClientRegion()))
+ .withRequestHandlers(errorHandler)
+ .build();
+
xfer_mgr = TransferManagerBuilder.standard()
- .withExecutorFactory(new ExecutorFactory() {
- @Override
- public ExecutorService newExecutor() {
- return new ThreadPoolExecutor(downloaderProperties.getCoreThreads(), downloaderProperties.getMaxThreads(),
- 120, TimeUnit.SECONDS,
- new ArrayBlockingQueue<Runnable>(downloaderProperties.getTaskQueueSize()));
- }
- })
- .withS3Client(s3Client).build();
+ .withExecutorFactory(() -> new ThreadPoolExecutor(downloaderProperties.getCoreThreads(), downloaderProperties.getMaxThreads(),
+ 120, TimeUnit.SECONDS, new ArrayBlockingQueue<>(downloaderProperties.getTaskQueueSize())))
+ .withS3Client(s3Client)
+ .build();
}
/** | ['src/main/java/com/hedera/downloader/Downloader.java', 'src/main/java/com/hedera/configLoader/ConfigLoader.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 253,257 | 58,780 | 7,316 | 40 | 4,787 | 952 | 101 | 2 | 443 | 64 | 96 | 13 | 0 | 0 | 1970-01-01T00:26:08 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,497 | hashgraph/hedera-mirror-node/71/36 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/36 | https://github.com/hashgraph/hedera-mirror-node/pull/71 | https://github.com/hashgraph/hedera-mirror-node/pull/71#issuecomment-523539740 | 1 | fixes | Don't process partial files | Due to the nature of downloading to files and processing the files in separate processes, it's possible that processing could occur on a partially downloaded file. Whether it's polling for new files manually or using java's WatcherService, it's a possibility that the S3 downloader created the file, wrote some bytes but has not finished writing all bytes by the time the file is picked up by the parsers (especially for larger files).
Possible solutions:
1. Download files to a different directory and move them once complete. Move is an atomic operation.
2. Combine downloader and parser into a single process and use S3's GetObject to download to a ByteArrayOutputStream instead of a file.
Pros to 1 is its quickest.
Pros to 2 is it's a simpler architecture and avoids the penalty of writing files and coordinating processes. Due to the serial nature of processing these file streams, the only benefit of having separate processes is for separation of failures. But parser would be effectively down anyway if there's no new files being downloaded. Con is it's more work and not sure how large files get to be loaded into memory. | 5725dde42826cc3511bae74c6cd7009d2f881fed | a1d99c88a3c98e19b1ed57d193901537a48d811c | https://github.com/hashgraph/hedera-mirror-node/compare/5725dde42826cc3511bae74c6cd7009d2f881fed...a1d99c88a3c98e19b1ed57d193901537a48d811c | diff --git a/src/main/java/com/hedera/configLoader/ConfigLoader.java b/src/main/java/com/hedera/configLoader/ConfigLoader.java
index 28f64a7ea..492a8d046 100644
--- a/src/main/java/com/hedera/configLoader/ConfigLoader.java
+++ b/src/main/java/com/hedera/configLoader/ConfigLoader.java
@@ -355,6 +355,10 @@ public class ConfigLoader {
return "";
}
+ public static String getDefaultTmpDir(OPERATION_TYPE operation) {
+ return getDefaultParseDir(operation).replace("/valid", "/tmp");
+ }
+
public static int getProxyPort() {
return proxyPort;
}
diff --git a/src/main/java/com/hedera/downloader/AccountBalancesDownloader.java b/src/main/java/com/hedera/downloader/AccountBalancesDownloader.java
index 96dfb9f75..b17abaa6a 100644
--- a/src/main/java/com/hedera/downloader/AccountBalancesDownloader.java
+++ b/src/main/java/com/hedera/downloader/AccountBalancesDownloader.java
@@ -12,13 +12,18 @@ import org.apache.commons.lang3.tuple.Pair;
import com.google.gson.JsonIOException;
import com.google.gson.JsonSyntaxException;
import com.hedera.configLoader.ConfigLoader;
+import com.hedera.configLoader.ConfigLoader.OPERATION_TYPE;
import com.hedera.signatureVerifier.NodeSignatureVerifier;
import com.hedera.utilities.Utility;
public class AccountBalancesDownloader extends Downloader {
+ private static String validDir = ConfigLoader.getDefaultParseDir(OPERATION_TYPE.BALANCE);
+ private static String tmpDir = ConfigLoader.getDefaultTmpDir(OPERATION_TYPE.BALANCE);
public AccountBalancesDownloader() {
+ Utility.createDirIfNotExists(validDir);
+ Utility.createDirIfNotExists(tmpDir);
}
public static void main(String[] args) {
@@ -67,8 +72,7 @@ public class AccountBalancesDownloader extends Downloader {
* return the name of directory which contains valid _Balances.csv files
* @param sigFilesMap
*/
- String verifySigsAndDownloadBalanceFiles(Map<String, List<File>> sigFilesMap) {
- String validDir = null;
+ private void verifySigsAndDownloadBalanceFiles(Map<String, List<File>> sigFilesMap) {
String lastValidBalanceFileName = "";
try {
lastValidBalanceFileName = ConfigLoader.getLastValidBalanceFileName();
@@ -99,18 +103,21 @@ public class AccountBalancesDownloader extends Downloader {
log.info(MARKER, "Stop file found, stopping.");
break;
}
- if (validDir == null) {
- validDir = validSigFile.getParentFile().getParent() + "/valid/";
- }
- Pair<Boolean, File> fileResult = downloadBalanceFile(validSigFile, validDir);
+
+ Pair<Boolean, File> fileResult = downloadFile(DownloadType.BALANCE, validSigFile, tmpDir);
File file = fileResult.getRight();
- if (file != null &&
- Utility.hashMatch(validSigFile, file)) {
- if (newLastValidBalanceFileName.isEmpty() ||
- fileNameComparator.compare(newLastValidBalanceFileName, file.getName()) < 0) {
- newLastValidBalanceFileName = file.getName();
- }
- break;
+ if (file != null && Utility.hashMatch(validSigFile, file)) {
+
+ // move the file to the valid directory
+ File fTo = new File(validDir + file.getName());
+
+ if (moveFile(file, fTo)) {
+ if (newLastValidBalanceFileName.isEmpty() ||
+ fileNameComparator.compare(newLastValidBalanceFileName, file.getName()) < 0) {
+ newLastValidBalanceFileName = file.getName();
+ }
+ break;
+ }
} else if (file != null) {
log.warn(MARKER, "{}'s Hash doesn't match the Hash contained in valid signature file. Will try to download a balance file with same timestamp from other nodes and check the Hash.", file.getPath());
}
@@ -122,17 +129,5 @@ public class AccountBalancesDownloader extends Downloader {
ConfigLoader.setLastValidBalanceFileName(newLastValidBalanceFileName);
ConfigLoader.saveToFile();
}
- return validDir;
}
-
- Pair<Boolean, File> downloadBalanceFile(File sigFile, String validDir) {
- String nodeAccountId = Utility.getAccountIDStringFromFilePath(sigFile.getPath());
- String sigFileName = sigFile.getName();
- String balanceFileName = sigFileName.replace("_Balances.csv_sig", "_Balances.csv");
- String s3ObjectKey = "accountBalances/balance" + nodeAccountId + "/" + balanceFileName;
- String localFileName = validDir + balanceFileName;
- return saveToLocal(bucketName, s3ObjectKey, localFileName);
- }
-
-
}
diff --git a/src/main/java/com/hedera/downloader/Downloader.java b/src/main/java/com/hedera/downloader/Downloader.java
index 733cbf466..411b1c38f 100644
--- a/src/main/java/com/hedera/downloader/Downloader.java
+++ b/src/main/java/com/hedera/downloader/Downloader.java
@@ -1,6 +1,5 @@
package com.hedera.downloader;
-
import com.amazonaws.AmazonServiceException;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.SdkClientException;
@@ -16,7 +15,6 @@ import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.transfer.Download;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
-import com.google.gson.JsonObject;
import com.hedera.configLoader.ConfigLoader;
import com.hedera.configLoader.ConfigLoader.CLOUD_PROVIDER;
import com.hedera.configLoader.ConfigLoader.OPERATION_TYPE;
@@ -30,11 +28,14 @@ import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.Marker;
import org.apache.logging.log4j.MarkerManager;
+import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
+
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
import java.util.ArrayList;
@@ -42,7 +43,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
-import java.util.stream.Collectors;
public abstract class Downloader {
protected static final Logger log = LogManager.getLogger("downloader");
@@ -149,6 +149,7 @@ public abstract class Downloader {
* Download all balance .csv files with timestamp later than lastValidBalanceFileName
*/
+ @Deprecated
protected void downloadBalanceFiles() throws IOException {
String s3Prefix = null;
String fileType = null;
@@ -157,8 +158,7 @@ public abstract class Downloader {
s3Prefix = ConfigLoader.getAccountBalanceS3Location();
fileType = ".csv";
lastValidFileName = ConfigLoader.getLastValidBalanceFileName();
- saveFilePath = ConfigLoader.getDefaultParseDir(OPERATION_TYPE.BALANCE);
-
+ saveFilePath = ConfigLoader.getDefaultTmpDir(OPERATION_TYPE.BALANCE);
// refresh node account ids
nodeAccountIds = loadNodeAccountIDs();
@@ -215,8 +215,11 @@ public abstract class Downloader {
count++;
File file = result.getRight();
if (file != null) {
- String fileName = file.getName();
- files.add(fileName);
+ // move the file to the valid directory
+ File fTo = new File(file.getAbsolutePath().replace("/tmp/", "/valid/") + file.getName());
+ if (moveFile(file, fTo)) {
+ files.add(file.getName());
+ }
}
} else if (result.getRight() == null) {
log.error(MARKER, "File {} failed to download from cloud", s3ObjectKey);
@@ -396,27 +399,6 @@ public abstract class Downloader {
return sigFilesMap;
}
-// /**
-// * return a pair of download result:
-// * boolean: download it or not.
-// * True means we download it successfully; False means it already exists or we fail to download it;
-// * File is the local file
-// * @param bucket_name
-// * @param s3ObjectKey
-// * @return
-// */
-// protected static Pair<Boolean, File> saveToLocal(String bucket_name,
-// String s3ObjectKey, String filePath) throws IOException {
-//
-// if (!Utility.createDirIfNotExists(filePath)) {
-// log.error(MARKER, "{} doesn't exist and we fail to create this directory", filePath);
-// return null;
-// }
-//
-// filePath += s3ObjectKey;
-// return saveToLocal(bucket_name, s3ObjectKey, filePath);
-// }
-
/**
* return a pair of download result:
* boolean: download it or not.
@@ -429,6 +411,7 @@ public abstract class Downloader {
*/
protected static Pair<Boolean, File> saveToLocal(String bucket_name,
String s3ObjectKey, String localFilepath) {
+
// ensure filePaths have OS specific separator
localFilepath = localFilepath.replace("/", "~");
localFilepath = localFilepath.replace("\\\\", "~");
@@ -436,15 +419,7 @@ public abstract class Downloader {
File f = new File(localFilepath).getAbsoluteFile();
- if (f.exists()) {
- log.info(MARKER, "File exists: " + localFilepath);
- return Pair.of(false, f);
- }
try {
- if( ! f.getParentFile().exists() ) {
- f.getParentFile().mkdirs();
- }
- //f.createNewFile();
Download download = xfer_mgr.download(bucket_name, s3ObjectKey, f);
download.waitForCompletion();
if (download.isDone()) {
@@ -455,10 +430,10 @@ public abstract class Downloader {
return Pair.of(false, null);
}
} catch (AmazonServiceException ex) {
- log.error(MARKER, "Download Fails: {}, Exception: {}", s3ObjectKey, ex);
+ log.error(MARKER, "Download Failed: {}, Exception: {}", s3ObjectKey, ex);
} catch (InterruptedException ex) {
- log.error(MARKER, "Download Fails: {}, Exception: {}", s3ObjectKey, ex);
- }
+ log.error(MARKER, "Download Failed: {}, Exception: {}", s3ObjectKey, ex);
+ }
return Pair.of(false, null);
}
@@ -517,4 +492,52 @@ public abstract class Downloader {
xfer_mgr = TransferManagerBuilder.standard()
.withS3Client(s3Client).build();
}
+
+ /**
+ * Moves a file from one location to another
+ * boolean: true if file moved successfully
+ * Note: The method doesn't check if source file or destination directory exist to avoid
+ * repeated checks that could hurt performance
+ * @param sourceFile
+ * @param destinationFile
+ * @return boolean
+ */
+ protected boolean moveFile(File sourceFile, File destinationFile) {
+ try {
+ // not checking if file exists to help with performance
+ // assumption is caller has created the destination file folder
+ Files.move(sourceFile.toPath(), destinationFile.toPath(), REPLACE_EXISTING);
+ return true;
+ } catch (IOException e) {
+ log.error(MARKER, "File Move from {} to {} Failed: {}, Exception: {}", sourceFile.getAbsolutePath(), destinationFile.getAbsolutePath(), e);
+ return false;
+ }
+ }
+
+ protected Pair<Boolean, File> downloadFile(DownloadType downloadType, File sigFile, String targetDir) {
+ String fileName = "";
+ String s3Prefix = "";
+
+ String nodeAccountId = Utility.getAccountIDStringFromFilePath(sigFile.getPath());
+ String sigFileName = sigFile.getName();
+
+ switch (downloadType) {
+ case BALANCE:
+ fileName = sigFileName.replace("_Balances.csv_sig", "_Balances.csv");
+ s3Prefix = ConfigLoader.getAccountBalanceS3Location();
+ break;
+ case EVENT:
+ fileName = sigFileName.replace(".evts_sig", ".evts");
+ s3Prefix = ConfigLoader.getEventFilesS3Location();
+ break;
+ case RCD:
+ fileName = sigFileName.replace(".rcd_sig", ".rcd");
+ s3Prefix = ConfigLoader.getRecordFilesS3Location();
+ break;
+ }
+ String s3ObjectKey = s3Prefix + nodeAccountId + "/" + fileName;
+
+ String localFileName = targetDir + "/" + fileName;
+ return saveToLocal(bucketName, s3ObjectKey, localFileName);
+ }
}
diff --git a/src/main/java/com/hedera/downloader/EventStreamFileDownloader.java b/src/main/java/com/hedera/downloader/EventStreamFileDownloader.java
index f3a91a852..4db1f5172 100644
--- a/src/main/java/com/hedera/downloader/EventStreamFileDownloader.java
+++ b/src/main/java/com/hedera/downloader/EventStreamFileDownloader.java
@@ -3,6 +3,7 @@ package com.hedera.downloader;
import com.google.gson.JsonIOException;
import com.google.gson.JsonSyntaxException;
import com.hedera.configLoader.ConfigLoader;
+import com.hedera.configLoader.ConfigLoader.OPERATION_TYPE;
import com.hedera.parser.EventStreamFileParser;
import com.hedera.signatureVerifier.NodeSignatureVerifier;
import com.hedera.utilities.Utility;
@@ -23,9 +24,13 @@ import java.util.stream.Stream;
public class EventStreamFileDownloader extends Downloader {
- private static String validDir = null;
+ private static String validDir = ConfigLoader.getDefaultParseDir(OPERATION_TYPE.EVENTS);
+ private static String tmpDir = ConfigLoader.getDefaultTmpDir(OPERATION_TYPE.EVENTS);
+ private static File fileValidDir = new File(validDir);
public EventStreamFileDownloader() {
+ Utility.createDirIfNotExists(validDir);
+ Utility.createDirIfNotExists(tmpDir);
}
public static void downloadNewEventfiles(EventStreamFileDownloader downloader) {
@@ -50,7 +55,7 @@ public class EventStreamFileDownloader extends Downloader {
if (validDir != null) {
// new Thread(() -> {
- verifyValidFiles(validDir);
+ verifyValidFiles();
// }).start();
}
@@ -86,7 +91,7 @@ public class EventStreamFileDownloader extends Downloader {
*
* @param validDir
*/
- public static void verifyValidFiles(String validDir) {
+ public static void verifyValidFiles() {
String lastValidEventFileName = "";
try {
lastValidEventFileName = ConfigLoader.getLastValidEventFileName();
@@ -104,11 +109,7 @@ public class EventStreamFileDownloader extends Downloader {
}
String lastValidEventFileName2 = lastValidEventFileName;
- File validDirFile = new File(validDir);
- if (!validDirFile.exists()) {
- return;
- }
- try (Stream<Path> pathStream = Files.walk(validDirFile.toPath())) {
+ try (Stream<Path> pathStream = Files.walk(fileValidDir.toPath())) {
List<String> fileNames = pathStream.filter(p -> Utility.isEventStreamFile(p.toString()))
.filter(p -> lastValidEventFileName2.isEmpty() ||
fileNameComparator.compare(p.toFile().getName(), lastValidEventFileName2) > 0)
@@ -159,7 +160,7 @@ public class EventStreamFileDownloader extends Downloader {
*
* @param sigFilesMap
*/
- String verifySigsAndDownloadEventStreamFiles(Map<String, List<File>> sigFilesMap) {
+ private void verifySigsAndDownloadEventStreamFiles(Map<String, List<File>> sigFilesMap) {
NodeSignatureVerifier verifier = new NodeSignatureVerifier();
for (String fileName : sigFilesMap.keySet()) {
@@ -173,14 +174,16 @@ public class EventStreamFileDownloader extends Downloader {
List<File> validSigFiles = verifier.verifySignatureFiles(sigFiles);
if (validSigFiles != null) {
for (File validSigFile : validSigFiles) {
- if (validDir == null) {
- validDir = validSigFile.getParentFile().getParent() + "/valid/";
- }
- Pair<Boolean, File> fileResult = downloadFile(validSigFile, validDir);
+ Pair<Boolean, File> fileResult = downloadFile(DownloadType.EVENT, validSigFile, tmpDir);
File file = fileResult.getRight();
- if (file != null &&
- Utility.hashMatch(validSigFile, file)) {
- break;
+ if (file != null && Utility.hashMatch(validSigFile, file)) {
+ // move the file to the valid directory
+ File fTo = new File(validDir + file.getName());
+
+ if (moveFile(file, fTo)) {
+ break;
+ }
+
} else if (file != null) {
log.warn(MARKER,
"{}'s Hash doesn't match the Hash contained in valid signature file. Will try to " +
@@ -195,22 +198,6 @@ public class EventStreamFileDownloader extends Downloader {
}
}
}
- return validDir;
}
- /**
- * Download .evts file
- *
- * @param sigFile
- * @param validDir
- * @return
- */
- Pair<Boolean, File> downloadFile(File sigFile, String validDir) {
- String nodeAccountId = Utility.getAccountIDStringFromFilePath(sigFile.getPath());
- String sigFileName = sigFile.getName();
- String fileName = sigFileName.replace(".evts_sig", ".evts");
- String s3ObjectKey = ConfigLoader.getEventFilesS3Location() + nodeAccountId + "/" + fileName;
- String localFileName = validDir + fileName;
- return saveToLocal(bucketName, s3ObjectKey, localFileName);
- }
}
diff --git a/src/main/java/com/hedera/downloader/RecordFileDownloader.java b/src/main/java/com/hedera/downloader/RecordFileDownloader.java
index 737408e96..669bb612a 100644
--- a/src/main/java/com/hedera/downloader/RecordFileDownloader.java
+++ b/src/main/java/com/hedera/downloader/RecordFileDownloader.java
@@ -3,6 +3,7 @@ package com.hedera.downloader;
import com.google.gson.JsonIOException;
import com.google.gson.JsonSyntaxException;
import com.hedera.configLoader.ConfigLoader;
+import com.hedera.configLoader.ConfigLoader.OPERATION_TYPE;
import com.hedera.parser.RecordFileParser;
import com.hedera.signatureVerifier.NodeSignatureVerifier;
import com.hedera.utilities.Utility;
@@ -23,10 +24,12 @@ import java.util.stream.Stream;
public class RecordFileDownloader extends Downloader {
- private static String validRcdDir = null;
- private static String s3prefix = "";
+ private static String validDir = ConfigLoader.getDefaultParseDir(OPERATION_TYPE.RECORDS);
+ private static String tmpDir = ConfigLoader.getDefaultTmpDir(OPERATION_TYPE.RECORDS);
public RecordFileDownloader() {
+ Utility.createDirIfNotExists(validDir);
+ Utility.createDirIfNotExists(tmpDir);
}
public static void downloadNewRecordfiles(RecordFileDownloader downloader) {
@@ -39,9 +42,9 @@ public class RecordFileDownloader extends Downloader {
// Verify signature files and download .rcd files of valid signature files
downloader.verifySigsAndDownloadRecordFiles(sigFilesMap);
- if (validRcdDir != null) {
+ if (validDir != null) {
// new Thread(() -> {
- verifyValidRecordFiles(validRcdDir);
+ verifyValidRecordFiles(validDir);
// }).start();
} else {
}
@@ -145,12 +148,6 @@ public class RecordFileDownloader extends Downloader {
// reload address book and keys
NodeSignatureVerifier verifier = new NodeSignatureVerifier();
- validRcdDir = null;
- s3prefix = ConfigLoader.getRecordFilesS3Location();
- if (s3prefix.endsWith("/")) {
- s3prefix = s3prefix.substring(0, s3prefix.length()-2);
- }
-
for (String fileName : sigFilesMap.keySet()) {
if (Utility.checkStopFile()) {
log.info(MARKER, "Stop file found, stopping");
@@ -169,14 +166,16 @@ public class RecordFileDownloader extends Downloader {
log.info(MARKER, "Stop file found, stopping");
break;
}
- if (validRcdDir == null) {
- validRcdDir = validSigFile.getParentFile().getParent() + "/valid/";
- }
- Pair<Boolean, File> rcdFileResult = downloadRcdFile(validSigFile, validRcdDir);
+
+ Pair<Boolean, File> rcdFileResult = downloadFile(DownloadType.RCD, validSigFile, tmpDir);
File rcdFile = rcdFileResult.getRight();
- if (rcdFile != null &&
- Utility.hashMatch(validSigFile, rcdFile)) {
- break;
+ if (rcdFile != null && Utility.hashMatch(validSigFile, rcdFile)) {
+ // move the file to the valid directory
+ File fTo = new File(validDir + "/" + rcdFile.getName());
+
+ if (moveFile(rcdFile, fTo)) {
+ break;
+ }
} else if (rcdFile != null) {
log.warn(MARKER, "{}'s Hash doesn't match the Hash contained in valid signature file. Will try to download a rcd file with same timestamp from other nodes and check the Hash.", rcdFile.getPath());
}
@@ -186,17 +185,7 @@ public class RecordFileDownloader extends Downloader {
}
}
}
- return validRcdDir;
- }
-
- Pair<Boolean, File> downloadRcdFile(File sigFile, String validRcdDir) {
- String nodeAccountId = Utility.getAccountIDStringFromFilePath(sigFile.getPath());
- String sigFileName = sigFile.getName();
- String rcdFileName = sigFileName.replace(".rcd_sig", ".rcd");
-// String s3ObjectKey = "recordstreams/record" + nodeAccountId + "/" + rcdFileName;
- String s3ObjectKey = s3prefix + nodeAccountId + "/" + rcdFileName;
-// String localFileName = validRcdDir + rcdFileName;
- return saveToLocal(bucketName, s3ObjectKey, validRcdDir + rcdFileName);
+ return validDir;
}
}
diff --git a/src/main/java/com/hedera/utilities/Utility.java b/src/main/java/com/hedera/utilities/Utility.java
index b67888b66..6f21b2bcc 100644
--- a/src/main/java/com/hedera/utilities/Utility.java
+++ b/src/main/java/com/hedera/utilities/Utility.java
@@ -45,8 +45,8 @@ public class Utility {
private static final Marker LOGM_EXCEPTION = MarkerManager.getMarker("EXCEPTION");
private static final Long SCALAR = 1_000_000_000L;
- private static final byte TYPE_PREV_HASH = 1; // next 48 bytes are hash384 of previous files
- private static final byte TYPE_RECORD = 2; // next data type is transaction and its record
+// private static final byte TYPE_PREV_HASH = 1; // next 48 bytes are hash384 of previous files
+// private static final byte TYPE_RECORD = 2; // next data type is transaction and its record
private static final byte TYPE_SIGNATURE = 3; // the file content signature, should not be hashed
private static final byte TYPE_FILE_HASH = 4; // next 48 bytes are hash384 of content of corresponding RecordFile
| ['src/main/java/com/hedera/configLoader/ConfigLoader.java', 'src/main/java/com/hedera/downloader/AccountBalancesDownloader.java', 'src/main/java/com/hedera/downloader/Downloader.java', 'src/main/java/com/hedera/downloader/EventStreamFileDownloader.java', 'src/main/java/com/hedera/downloader/RecordFileDownloader.java', 'src/main/java/com/hedera/utilities/Utility.java'] | {'.java': 6} | 6 | 6 | 0 | 0 | 6 | 266,279 | 62,008 | 7,589 | 30 | 10,678 | 2,553 | 250 | 6 | 1,142 | 190 | 224 | 9 | 0 | 0 | 1970-01-01T00:26:06 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,502 | hashgraph/hedera-mirror-node/193/192 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/192 | https://github.com/hashgraph/hedera-mirror-node/pull/193 | https://github.com/hashgraph/hedera-mirror-node/pull/193 | 1 | fixes | When address book, that is larger than a single transaction, is updated - mirror node stops processing | **Detailed Description**
Mainnet mirror node stopped processing (and could not continue processing after restart) when it encountered an address book update that was larger than a single transaction.
The address book update in mainnet was a FILEUPDATE followed by 2 FILEAPPEND operations. This broke the mirrornode and stopped all processing.
The log messages were:
```
at com.google.protobuf.AbstractParser.parseFrom(AbstractParser.java:209) ~[protobuf-java-3.5.1.jar:?]
at com.google.protobuf.AbstractParser.parseFrom(AbstractParser.java:214) ~[protobuf-java-3.5.1.jar:?]
at com.google.protobuf.AbstractParser.parseFrom(AbstractParser.java:49) ~[protobuf-java-3.5.1.jar:?]
at com.hederahashgraph.api.proto.java.NodeAddressBook.parseFrom(NodeAddressBook.java:237) ~[hedera-protobuf-java-api-0.3.4.jar:?]
at com.hedera.signatureVerifier.NodeSignatureVerifier.<init>(NodeSignatureVerifier.java:62) [mirrorNode.jar:?]
at com.hedera.downloader.RecordFileDownloader.verifySigsAndDownloadRecordFiles(RecordFileDownloader.java:136) [mirrorNode.jar:?]
at com.hedera.downloader.RecordFileDownloader.downloadNewRecordfiles(RecordFileDownloader.java:65) [mirrorNode.jar:?]
at com.hedera.downloader.RecordFileDownloader.main(RecordFileDownloader.java:82) [mirrorNode.jar:?]
```
**Actual Behavior**
**Expected Behavior**
**Environment:**
**Additional Context** | bd4ceb2a2c334535c6cf41a6bad3d14daa1f3e82 | 245b89602df53d89689e16810282738e6ab0d754 | https://github.com/hashgraph/hedera-mirror-node/compare/bd4ceb2a2c334535c6cf41a6bad3d14daa1f3e82...245b89602df53d89689e16810282738e6ab0d754 | diff --git a/src/main/java/com/hedera/addressBook/NetworkAddressBook.java b/src/main/java/com/hedera/addressBook/NetworkAddressBook.java
index 763ac2ee5..3d075ee18 100644
--- a/src/main/java/com/hedera/addressBook/NetworkAddressBook.java
+++ b/src/main/java/com/hedera/addressBook/NetworkAddressBook.java
@@ -22,6 +22,7 @@ package com.hedera.addressBook;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.util.Arrays;
import java.util.Map;
import com.hedera.configLoader.ConfigLoader;
@@ -33,6 +34,7 @@ import com.hedera.hashgraph.sdk.HederaNetworkException;
import com.hedera.hashgraph.sdk.account.AccountId;
import com.hedera.hashgraph.sdk.crypto.ed25519.Ed25519PrivateKey;
import com.hedera.hashgraph.sdk.file.FileId;
+import com.hederahashgraph.api.proto.java.NodeAddressBook;
import io.github.cdimascio.dotenv.Dotenv;
@@ -50,6 +52,7 @@ public class NetworkAddressBook {
static Client client;
static Dotenv dotenv = Dotenv.configure().ignoreIfMissing().load();
+ static byte[] addressBookBytes = new byte[0];
public static void main(String[] args) {
@@ -62,8 +65,7 @@ public class NetworkAddressBook {
.setFileId(new FileId(0, 0, 102))
.execute();
- writeFile(contents.getFileContents().getContents().toByteArray());
- log.info("New address book successfully saved to {}", addressBookFile);
+ update(contents.getFileContents().getContents().toByteArray());
} catch (FileNotFoundException e) {
log.error("Address book file {} not found.", addressBookFile);
} catch (IOException e) {
@@ -75,10 +77,23 @@ public class NetworkAddressBook {
}
}
- public static void writeFile(byte[] newContents) throws IOException {
+ public static void update(byte[] newContents) throws IOException {
+ addressBookBytes = newContents;
+ savetoDisk();
+ }
+
+ public static void append(byte[] extraContents) throws IOException {
+ byte[] newAddressBook = Arrays.copyOf(addressBookBytes, addressBookBytes.length + extraContents.length);
+ System.arraycopy(extraContents, 0, newAddressBook, addressBookBytes.length, extraContents.length);
+ addressBookBytes = newAddressBook;
+ savetoDisk();
+ }
+
+ private static void savetoDisk() throws IOException {
FileOutputStream fos = new FileOutputStream(addressBookFile);
- fos.write(newContents);
+ fos.write(addressBookBytes);
fos.close();
+ log.info("New address book successfully saved to {}", addressBookFile);
}
private static Client createHederaClient() {
diff --git a/src/main/java/com/hedera/recordFileLogger/RecordFileLogger.java b/src/main/java/com/hedera/recordFileLogger/RecordFileLogger.java
index 7405686b3..939587269 100644
--- a/src/main/java/com/hedera/recordFileLogger/RecordFileLogger.java
+++ b/src/main/java/com/hedera/recordFileLogger/RecordFileLogger.java
@@ -645,6 +645,15 @@ public class RecordFileLogger {
if ( ! bSkip) {
sqlInsertFileData.addBatch();
}
+
+ // update the local address book
+ FileID updatedFile = body.getFileAppend().getFileID();
+
+ if ((updatedFile.getFileNum() == 102) && (updatedFile.getShardNum() == 0) && (updatedFile.getRealmNum() == 0)) {
+ // we have an address book update, refresh the local file
+ NetworkAddressBook.append(contents);
+ }
+
}
} else if (body.hasFileCreate()) {
if (ConfigLoader.getPersistFiles().contentEquals("ALL") || (ConfigLoader.getPersistFiles().contentEquals("SYSTEM") && txRecord.getReceipt().getFileID().getFileNum() < 1000)) {
@@ -673,7 +682,7 @@ public class RecordFileLogger {
if ((updatedFile.getFileNum() == 102) && (updatedFile.getShardNum() == 0) && (updatedFile.getRealmNum() == 0)) {
// we have an address book update, refresh the local file
- NetworkAddressBook.writeFile(body.getFileUpdate().getContents().toByteArray());
+ NetworkAddressBook.update(body.getFileUpdate().getContents().toByteArray());
}
} else if (body.hasFreeze()) { | ['src/main/java/com/hedera/addressBook/NetworkAddressBook.java', 'src/main/java/com/hedera/recordFileLogger/RecordFileLogger.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 262,900 | 60,976 | 7,575 | 32 | 1,782 | 348 | 34 | 2 | 1,388 | 87 | 325 | 25 | 0 | 1 | 1970-01-01T00:26:07 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,503 | hashgraph/hedera-mirror-node/133/132 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/132 | https://github.com/hashgraph/hedera-mirror-node/pull/133 | https://github.com/hashgraph/hedera-mirror-node/pull/133 | 1 | fixes | RecordFileDownloader re-downloads same files over and over | The hash map in ApplicationStatus.java isn't static so when it's used by several classes, changes made by one don't propagate to the other. As a result, RecordFileDownloader and Downloader were managing two separate hash maps. | 7c1b352c86bb9b9712d3ab4b82c160b63e19bb13 | f7ef9da46ce2acc3b5184535df1d707586e65185 | https://github.com/hashgraph/hedera-mirror-node/compare/7c1b352c86bb9b9712d3ab4b82c160b63e19bb13...f7ef9da46ce2acc3b5184535df1d707586e65185 | diff --git a/src/main/java/com/hedera/databaseUtilities/ApplicationStatus.java b/src/main/java/com/hedera/databaseUtilities/ApplicationStatus.java
index 8d2847feb..30eab5987 100644
--- a/src/main/java/com/hedera/databaseUtilities/ApplicationStatus.java
+++ b/src/main/java/com/hedera/databaseUtilities/ApplicationStatus.java
@@ -22,7 +22,7 @@ public class ApplicationStatus {
,LAST_PROCESSED_RECORD_HASH
}
- private ConcurrentHashMap<ApplicationStatusCode, String> applicationStatusMap = new ConcurrentHashMap<ApplicationStatusCode, String>();
+ private static ConcurrentHashMap<ApplicationStatusCode, String> applicationStatusMap = new ConcurrentHashMap<ApplicationStatusCode, String>();
private static final String updateSQL = "UPDATE t_application_status SET "
+ " status_value = ? "
diff --git a/src/main/java/com/hedera/parser/RecordFileParser.java b/src/main/java/com/hedera/parser/RecordFileParser.java
index 08217bf20..c575e4750 100644
--- a/src/main/java/com/hedera/parser/RecordFileParser.java
+++ b/src/main/java/com/hedera/parser/RecordFileParser.java
@@ -8,6 +8,7 @@ import java.security.MessageDigest;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import com.google.common.base.Stopwatch; | ['src/main/java/com/hedera/databaseUtilities/ApplicationStatus.java', 'src/main/java/com/hedera/parser/RecordFileParser.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 252,987 | 58,622 | 7,272 | 30 | 320 | 43 | 3 | 2 | 226 | 35 | 47 | 1 | 0 | 0 | 1970-01-01T00:26:06 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,504 | hashgraph/hedera-mirror-node/59/57 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/57 | https://github.com/hashgraph/hedera-mirror-node/pull/59 | https://github.com/hashgraph/hedera-mirror-node/pull/59 | 1 | fixes | FileWatcher not notified of files created while down | `BalanceFileLogger` uses the Java file watching API to be notified of creates and updates of files that the downloader writes. However, if `BalanceFileLogger` is down while downloader writes them then there will be no notification. We need to look for new files on startup. | ca4696cc42aecdb6c56986b390ab4e69f63db65c | 2f7ce61451e9e12f4577d57948f600004d80afca | https://github.com/hashgraph/hedera-mirror-node/compare/ca4696cc42aecdb6c56986b390ab4e69f63db65c...2f7ce61451e9e12f4577d57948f600004d80afca | diff --git a/src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java b/src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java
index 3a5acffdc..de10c1c00 100644
--- a/src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java
+++ b/src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java
@@ -72,8 +72,6 @@ public class BalanceFileLogger extends FileWatcher {
,NUM
}
- private static Connection connect = null;
-
private static Instant fileTimestamp;
private static long fileSeconds = 0;
private static long fileNanos = 0;
@@ -144,7 +142,7 @@ public class BalanceFileLogger extends FileWatcher {
FileWatcher fileWatcher = new BalanceFileLogger(balanceFilePath);
fileWatcher.watch();
}
-
+
@Override
public void onCreate() {
processLastBalanceFile();
@@ -176,249 +174,236 @@ public class BalanceFileLogger extends FileWatcher {
if ( ! balanceFile.toString().endsWith(".csv") ) {
return false;
}
- try {
- // process the file
- connect = DatabaseUtilities.openDatabase(connect);
-
- if (connect != null) {
-
- try {
- connect.setAutoCommit(false);
- } catch (SQLException e) {
- log.error(LOGM_EXCEPTION, "Unable to unset database auto commit, Exception: {}", e.getMessage());
- return false;
- }
-
- if ( ! parseFileName(balanceFile)) {
- return false;
- }
-
- PreparedStatement selectBalance = connect.prepareStatement(
- "SELECT id"
- + " FROM t_account_balances"
- + " WHERE shard = ?"
- + " AND realm = ?"
- + " AND num = ?");
-
- PreparedStatement insertBalance = connect.prepareStatement(
- "INSERT INTO t_account_balances (shard, realm, num, balance) "
- + " VALUES (?, ?, ?, 0) "
- + " RETURNING id");
-
- PreparedStatement insertBalanceHistory;
- insertBalanceHistory = connect.prepareStatement(
- "insert into t_account_balance_history (snapshot_time, seconds, nanos, snapshot_time_ns, fk_balance_id, balance) "
- + " values ("
- + " ?" // snapshot
- + ", ?" // seconds
- + ", ?" // nanos
- + ", ?" // snapshot_time_ns
- + ", ?" // balance_id
- + ", ?" // balance
- + ")"
- + " ON CONFLICT (snapshot_time, seconds, fk_balance_id)"
- + " DO UPDATE set balance = EXCLUDED.balance");
-
- BufferedReader br = new BufferedReader(new FileReader(balanceFile));
-
- String line;
- while ((line = br.readLine()) != null) {
- if (processLine) {
- try {
- String[] balanceLine = line.split(",");
- if (balanceLine.length != 4) {
- log.error(LOGM_EXCEPTION, "Balance file {} appears truncated", balanceFile);
- connect.rollback();
- insertBalanceHistory.close();
- selectBalance.close();
- insertBalance.close();
- br.close();
- return false;
- } else {
-
- // get the account id from t_Account_balances
- long accountId = 0;
-
- selectBalance.setLong(BalanceSelect.SHARD.ordinal(), Long.valueOf(balanceLine[0]));
- selectBalance.setLong(BalanceSelect.REALM.ordinal(), Long.valueOf(balanceLine[1]));
- selectBalance.setLong(BalanceSelect.NUM.ordinal(), Long.valueOf(balanceLine[2]));
-
- selectBalance.execute();
- ResultSet balanceRow = selectBalance.getResultSet();
-
- if (balanceRow.next()) {
- accountId = balanceRow.getLong(1);
- } else {
- insertBalance.setLong(BalanceHistoryInsertBalance.SHARD.ordinal(), Long.valueOf(balanceLine[0]));
- insertBalance.setLong(BalanceHistoryInsertBalance.REALM.ordinal(), Long.valueOf(balanceLine[1]));
- insertBalance.setLong(BalanceHistoryInsertBalance.NUM.ordinal(), Long.valueOf(balanceLine[2]));
-
- insertBalance.execute();
-
- ResultSet newId = insertBalance.getResultSet();
- if (newId.next()) {
- accountId = newId.getLong(1);
- newId.close();
- } else {
- // failed to create or fetch the account from t_account_balances
- newId.close();
- balanceRow.close();
- throw new IllegalStateException("Unable to create or find, shard " + balanceLine[0] + ", realm " + balanceLine[1] + ", num " + balanceLine[2]);
- }
- }
- balanceRow.close();
- Timestamp timestamp = Timestamp.from(fileTimestamp);
- insertBalanceHistory.setTimestamp(BalanceHistoryInsert.SNAPSHOT_TIME.ordinal(), timestamp);
- insertBalanceHistory.setLong(BalanceHistoryInsert.SECONDS.ordinal(), fileSeconds);
- insertBalanceHistory.setLong(BalanceHistoryInsert.NANOS.ordinal(), fileNanos);
- insertBalanceHistory.setLong(BalanceHistoryInsert.SNAPSHOT_TIME_NS.ordinal(), Utility.convertInstantToNanos(fileTimestamp));
- insertBalanceHistory.setLong(BalanceHistoryInsert.FK_BAL_ID.ordinal(), accountId);
- insertBalanceHistory.setLong(BalanceHistoryInsert.BALANCE.ordinal(), Long.valueOf(balanceLine[3]));
-
- insertBalanceHistory.execute();
- }
- } catch (SQLException e) {
- log.error(LOGM_EXCEPTION, "Exception {}", e);
- connect.rollback();
- insertBalanceHistory.close();
- selectBalance.close();
- insertBalance.close();
- br.close();
- return false;
- }
- } else if (line.contains("shard")) {
- processLine = true;
- }
- }
- connect.commit();
- insertBalanceHistory.close();
- selectBalance.close();
- insertBalance.close();
- br.close();
- return true;
- }
- } catch (FileNotFoundException e) {
- log.error(LOGM_EXCEPTION, "Exception {}", e);
- } catch (IOException e) {
- log.error(LOGM_EXCEPTION, "Exception {}", e);
- } catch (SQLException e) {
+ try (Connection connect = DatabaseUtilities.getConnection()) {
+ try {
+ connect.setAutoCommit(false);
+ } catch (SQLException e) {
+ log.error(LOGM_EXCEPTION, "Unable to unset database auto commit, Exception: {}", e.getMessage());
+ return false;
+ }
+
+ if ( ! parseFileName(balanceFile)) {
+ return false;
+ }
+
+ PreparedStatement selectBalance = connect.prepareStatement(
+ "SELECT id"
+ + " FROM t_account_balances"
+ + " WHERE shard = ?"
+ + " AND realm = ?"
+ + " AND num = ?");
+
+ PreparedStatement insertBalance = connect.prepareStatement(
+ "INSERT INTO t_account_balances (shard, realm, num, balance) "
+ + " VALUES (?, ?, ?, 0) "
+ + " RETURNING id");
+
+ PreparedStatement insertBalanceHistory;
+ insertBalanceHistory = connect.prepareStatement(
+ "insert into t_account_balance_history (snapshot_time, seconds, nanos, snapshot_time_ns, fk_balance_id, balance) "
+ + " values ("
+ + " ?" // snapshot
+ + ", ?" // seconds
+ + ", ?" // nanos
+ + ", ?" // snapshot_time_ns
+ + ", ?" // balance_id
+ + ", ?" // balance
+ + ")"
+ + " ON CONFLICT (snapshot_time, seconds, fk_balance_id)"
+ + " DO UPDATE set balance = EXCLUDED.balance");
+
+ BufferedReader br = new BufferedReader(new FileReader(balanceFile));
+
+ String line;
+ while ((line = br.readLine()) != null) {
+ if (processLine) {
+ try {
+ String[] balanceLine = line.split(",");
+ if (balanceLine.length != 4) {
+ log.error(LOGM_EXCEPTION, "Balance file {} appears truncated", balanceFile);
+ connect.rollback();
+ insertBalanceHistory.close();
+ selectBalance.close();
+ insertBalance.close();
+ br.close();
+ return false;
+ } else {
+
+ // get the account id from t_Account_balances
+ long accountId = 0;
+
+ selectBalance.setLong(BalanceSelect.SHARD.ordinal(), Long.valueOf(balanceLine[0]));
+ selectBalance.setLong(BalanceSelect.REALM.ordinal(), Long.valueOf(balanceLine[1]));
+ selectBalance.setLong(BalanceSelect.NUM.ordinal(), Long.valueOf(balanceLine[2]));
+
+ selectBalance.execute();
+ ResultSet balanceRow = selectBalance.getResultSet();
+
+ if (balanceRow.next()) {
+ accountId = balanceRow.getLong(1);
+ } else {
+ insertBalance.setLong(BalanceHistoryInsertBalance.SHARD.ordinal(), Long.valueOf(balanceLine[0]));
+ insertBalance.setLong(BalanceHistoryInsertBalance.REALM.ordinal(), Long.valueOf(balanceLine[1]));
+ insertBalance.setLong(BalanceHistoryInsertBalance.NUM.ordinal(), Long.valueOf(balanceLine[2]));
+
+ insertBalance.execute();
+
+ ResultSet newId = insertBalance.getResultSet();
+ if (newId.next()) {
+ accountId = newId.getLong(1);
+ newId.close();
+ } else {
+ // failed to create or fetch the account from t_account_balances
+ newId.close();
+ balanceRow.close();
+ throw new IllegalStateException("Unable to create or find, shard " + balanceLine[0] + ", realm " + balanceLine[1] + ", num " + balanceLine[2]);
+ }
+ }
+ balanceRow.close();
+ Timestamp timestamp = Timestamp.from(fileTimestamp);
+ insertBalanceHistory.setTimestamp(BalanceHistoryInsert.SNAPSHOT_TIME.ordinal(), timestamp);
+ insertBalanceHistory.setLong(BalanceHistoryInsert.SECONDS.ordinal(), fileSeconds);
+ insertBalanceHistory.setLong(BalanceHistoryInsert.NANOS.ordinal(), fileNanos);
+ insertBalanceHistory.setLong(BalanceHistoryInsert.SNAPSHOT_TIME_NS.ordinal(), Utility.convertInstantToNanos(fileTimestamp));
+ insertBalanceHistory.setLong(BalanceHistoryInsert.FK_BAL_ID.ordinal(), accountId);
+ insertBalanceHistory.setLong(BalanceHistoryInsert.BALANCE.ordinal(), Long.valueOf(balanceLine[3]));
+
+ insertBalanceHistory.execute();
+ }
+ } catch (SQLException e) {
+ log.error(LOGM_EXCEPTION, "Exception {}", e);
+ connect.rollback();
+ insertBalanceHistory.close();
+ selectBalance.close();
+ insertBalance.close();
+ br.close();
+ return false;
+ }
+ } else if (line.contains("shard")) {
+ processLine = true;
+ }
+ }
+ connect.commit();
+ insertBalanceHistory.close();
+ selectBalance.close();
+ insertBalance.close();
+ br.close();
+ return true;
+ } catch (Exception e) {
log.error(LOGM_EXCEPTION, "Exception {}", e);
}
return false;
}
private static void processLastBalanceFile() {
-
boolean processLine = false;
- try {
- File balanceFile = getLatestBalancefile();
-
- if (balanceFile != null) {
- // process the file
- connect = DatabaseUtilities.openDatabase(connect);
-
- if ( ! parseFileName(balanceFile)) {
- return;
- }
-
- if (connect != null) {
- connect.setAutoCommit(false);
-
- PreparedStatement updateLastBalanceTime = connect.prepareStatement(
- "UPDATE t_account_balance_refresh_time"
- + " SET seconds = ?"
- + ",nanos = ?");
-
- PreparedStatement selectBalance = connect.prepareStatement(
- "SELECT id"
- + " FROM t_account_balances"
- + " WHERE shard = ?"
- + " AND realm = ?"
- + " AND num = ?");
-
- PreparedStatement updateBalance = connect.prepareStatement(
- "UPDATE t_account_balances"
- + " SET balance = ?"
- + " WHERE id = ?");
-
-
- PreparedStatement insertBalance = connect.prepareStatement(
- "INSERT INTO t_account_balances (shard, realm, num, balance) "
- + " VALUES (?, ?, ?, ?)");
-
- // update last file update time
- updateLastBalanceTime.setLong(1, fileSeconds);
- updateLastBalanceTime.setLong(2, fileNanos);
- updateLastBalanceTime.execute();
-
- BufferedReader br = new BufferedReader(new FileReader(balanceFile));
-
- String line;
- while ((line = br.readLine()) != null) {
- if (processLine) {
- try {
- String[] balanceLine = line.split(",");
- if (balanceLine.length != 4) {
- log.error(LOGM_EXCEPTION, "Balance file {} appears truncated", balanceFile);
- connect.rollback();
- break;
- } else {
- selectBalance.setLong(BalanceSelect.SHARD.ordinal(), Long.valueOf(balanceLine[0]));
- selectBalance.setLong(BalanceSelect.REALM.ordinal(), Long.valueOf(balanceLine[1]));
- selectBalance.setLong(BalanceSelect.NUM.ordinal(), Long.valueOf(balanceLine[2]));
-
- selectBalance.execute();
- ResultSet balanceRow = selectBalance.getResultSet();
- if (balanceRow.next()) {
- // update the balance
- updateBalance.setLong(BalanceUpdate.BALANCE.ordinal(), Long.valueOf(balanceLine[3]));
- updateBalance.setLong(BalanceUpdate.ID.ordinal(), balanceRow.getLong(1));
- updateBalance.execute();
- } else {
- // insert new row
- insertBalance.setLong(BalanceInsert.SHARD.ordinal(), Long.valueOf(balanceLine[0]));
- insertBalance.setLong(BalanceInsert.REALM.ordinal(), Long.valueOf(balanceLine[1]));
- insertBalance.setLong(BalanceInsert.NUM.ordinal(), Long.valueOf(balanceLine[2]));
- insertBalance.setLong(BalanceInsert.BALANCE.ordinal(), Long.valueOf(balanceLine[3]));
- insertBalance.execute();
- }
- balanceRow.close();
- }
-
- } catch (SQLException e) {
- connect.rollback();
- log.error(LOGM_EXCEPTION, "Exception {}", e);
- break;
- }
- } else if (line.contentEquals("shard,realm,number,balance")) {
- // skip all lines until shard,realm,number,balance
- processLine = true;
- } else if (line.contentEquals("shardNum,realmNum,accountNum,balance")) {
- // skip all lines until shard,realm,number,balance
- processLine = true;
- }
- }
- connect.commit();
- insertBalance.close();
- updateBalance.close();
- selectBalance.close();
- updateLastBalanceTime.close();
- br.close();
- }
- } else {
- log.info("No balance file to parse found");
- }
- } catch (IOException e) {
- log.error(LOGM_EXCEPTION, "Exception {}", e);
- } catch (SQLException e) {
- log.error(LOGM_EXCEPTION, "Exception {}", e);
- try {
- connect.rollback();
- } catch (SQLException e1) {
- log.error(LOGM_EXCEPTION, "Exception {}", e1);
+ try (Connection connect = DatabaseUtilities.getConnection()) {
+ File balanceFile = getLatestBalancefile();
+
+ if (balanceFile == null) {
+ return;
+ } else if (!parseFileName(balanceFile)) {
+ log.info("Invalid balance file");
+ return;
}
- }
+
+ try {
+ connect.setAutoCommit(false);
+
+ PreparedStatement updateLastBalanceTime = connect.prepareStatement(
+ "UPDATE t_account_balance_refresh_time"
+ + " SET seconds = ?"
+ + ",nanos = ?");
+
+ PreparedStatement selectBalance = connect.prepareStatement(
+ "SELECT id"
+ + " FROM t_account_balances"
+ + " WHERE shard = ?"
+ + " AND realm = ?"
+ + " AND num = ?");
+
+ PreparedStatement updateBalance = connect.prepareStatement(
+ "UPDATE t_account_balances"
+ + " SET balance = ?"
+ + " WHERE id = ?");
+
+
+ PreparedStatement insertBalance = connect.prepareStatement(
+ "INSERT INTO t_account_balances (shard, realm, num, balance) "
+ + " VALUES (?, ?, ?, ?)");
+
+ // update last file update time
+ updateLastBalanceTime.setLong(1, fileSeconds);
+ updateLastBalanceTime.setLong(2, fileNanos);
+ updateLastBalanceTime.execute();
+
+ BufferedReader br = new BufferedReader(new FileReader(balanceFile));
+
+ String line;
+ while ((line = br.readLine()) != null) {
+ if (processLine) {
+ try {
+ String[] balanceLine = line.split(",");
+ if (balanceLine.length != 4) {
+ log.error(LOGM_EXCEPTION, "Balance file {} appears truncated", balanceFile);
+ connect.rollback();
+ break;
+ } else {
+ selectBalance.setLong(BalanceSelect.SHARD.ordinal(), Long.valueOf(balanceLine[0]));
+ selectBalance.setLong(BalanceSelect.REALM.ordinal(), Long.valueOf(balanceLine[1]));
+ selectBalance.setLong(BalanceSelect.NUM.ordinal(), Long.valueOf(balanceLine[2]));
+
+ selectBalance.execute();
+ ResultSet balanceRow = selectBalance.getResultSet();
+ if (balanceRow.next()) {
+ // update the balance
+ updateBalance.setLong(BalanceUpdate.BALANCE.ordinal(), Long.valueOf(balanceLine[3]));
+ updateBalance.setLong(BalanceUpdate.ID.ordinal(), balanceRow.getLong(1));
+ updateBalance.execute();
+ } else {
+ // insert new row
+ insertBalance.setLong(BalanceInsert.SHARD.ordinal(), Long.valueOf(balanceLine[0]));
+ insertBalance.setLong(BalanceInsert.REALM.ordinal(), Long.valueOf(balanceLine[1]));
+ insertBalance.setLong(BalanceInsert.NUM.ordinal(), Long.valueOf(balanceLine[2]));
+ insertBalance.setLong(BalanceInsert.BALANCE.ordinal(), Long.valueOf(balanceLine[3]));
+ insertBalance.execute();
+ }
+ balanceRow.close();
+ }
+
+ } catch (SQLException e) {
+ connect.rollback();
+ log.error(LOGM_EXCEPTION, "Exception {}", e);
+ break;
+ }
+ } else if (line.contentEquals("shard,realm,number,balance")) {
+ // skip all lines until shard,realm,number,balance
+ processLine = true;
+ } else if (line.contentEquals("shardNum,realmNum,accountNum,balance")) {
+ // skip all lines until shard,realm,number,balance
+ processLine = true;
+ }
+ }
+ connect.commit();
+ insertBalance.close();
+ updateBalance.close();
+ selectBalance.close();
+ updateLastBalanceTime.close();
+ br.close();
+ } catch (IOException e) {
+ log.error(LOGM_EXCEPTION, "Exception {}", e);
+ } catch (SQLException e) {
+ log.error(LOGM_EXCEPTION, "Exception {}", e);
+ try {
+ connect.rollback();
+ } catch (SQLException e1) {
+ log.error(LOGM_EXCEPTION, "Exception {}", e1);
+ }
+ }
+ } catch (Exception e) {
+ log.error(LOGM_EXCEPTION, "Error closing connection {}", e);
+ }
log.info(MARKER, "Last Balance processing done");
}
}
diff --git a/src/main/java/com/hedera/fileWatcher/FileWatcher.java b/src/main/java/com/hedera/fileWatcher/FileWatcher.java
index f8ad62d0b..89b142992 100644
--- a/src/main/java/com/hedera/fileWatcher/FileWatcher.java
+++ b/src/main/java/com/hedera/fileWatcher/FileWatcher.java
@@ -21,6 +21,9 @@ public abstract class FileWatcher {
}
public void watch() {
+ // Invoke on startup to check for any changed files while this process was down.
+ onCreate();
+
try (WatchService watcher = FileSystems.getDefault().newWatchService()) {
Path path = pathToWatch.toPath();
WatchKey rootKey = path.register(watcher, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY); | ['src/main/java/com/hedera/balanceFileLogger/BalanceFileLogger.java', 'src/main/java/com/hedera/fileWatcher/FileWatcher.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 269,660 | 62,093 | 7,612 | 30 | 19,981 | 3,753 | 462 | 2 | 273 | 44 | 56 | 1 | 0 | 0 | 1970-01-01T00:26:06 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,501 | hashgraph/hedera-mirror-node/213/203 | hashgraph | hedera-mirror-node | https://github.com/hashgraph/hedera-mirror-node/issues/203 | https://github.com/hashgraph/hedera-mirror-node/pull/213 | https://github.com/hashgraph/hedera-mirror-node/pull/213 | 1 | fixes | DB migration error | **Detailed Description**
ERROR [main ] o.f.c.i.c.DbMigrate Migration of schema "public" to version 1.8 - MigrateConfig failed! Changes successfully rolled back.
The issue affects any attempt to migrate the database (which running any of the services does) when:
- The database had not already migrated v1.8 of the DB, **_and_**
- `./config/config.json` is not located relative to the working directory of the running service/app.
This would include anyone trying to install out of the CI-deliverable, using the instructions for installing that, against a fresh/empty database (as ./config/config.json is no longer located at that place relative to the running service's working directory).
**Workaround**
Ensure that "config.json" is found in the "./config/" directory relative to the working directory of the running processes (any of the downloaders or parsers).
If the working directory of the processes is /usr/lib/mirror-node and the config.json is in /usr/etc/mirror-node, then:
- `sudo ln -s /usr/etc/mirror-node /usr/lib/mirror-node/config`
- restart all downloader and parser processes
**Actual Behavior**
**Expected Behavior**
**Environment:**
**Additional Context**
Fix will be to remove the v1.8 migration code as it's not needed for a fresh DB, is causing this issue, and was only in place to upgrade from pre-v0.1.0 systems.
| f6e13feadced118b964abb73a9b5a0a4829a0ae8 | 1df691351b0177f0dd04569911b3e76d66dc68f2 | https://github.com/hashgraph/hedera-mirror-node/compare/f6e13feadced118b964abb73a9b5a0a4829a0ae8...1df691351b0177f0dd04569911b3e76d66dc68f2 | diff --git a/src/main/java/com/hedera/databaseUtilities/DatabaseUtilities.java b/src/main/java/com/hedera/databaseUtilities/DatabaseUtilities.java
index ce51629af..7cc839716 100644
--- a/src/main/java/com/hedera/databaseUtilities/DatabaseUtilities.java
+++ b/src/main/java/com/hedera/databaseUtilities/DatabaseUtilities.java
@@ -53,6 +53,16 @@ public class DatabaseUtilities {
.dataSource(dataSource)
.baselineOnMigrate(true)
.baselineVersion(MigrationVersion.fromVersion("0"))
+
+ // Allow missing migrations without failing the overall migration.
+ // This would allow the database to have a "V1.23" migration in it (before the product is 1.0-ready),
+ // that gets removed by developers at a future point.
+ // When the software is updated and no longer has V1.23 in it (bad migration file removed by dev),
+ // but flyway sees a database that had V1.23 in it - ignore this issue and continue flyway migrations.
+ // This allows development to more easily "fix" some bad initial migration files without resorting
+ // to `flyway repair`.
+ .ignoreMissingMigrations(true)
+
.placeholders(ImmutableMap.of(
"api-user", ConfigLoader.getApiUsername(),
"api-password", ConfigLoader.getApiPassword(),
diff --git a/src/main/java/db/migration/V1_8__MigrateConfig.java b/src/main/java/db/migration/V1_8__MigrateConfig.java
deleted file mode 100644
index d57fe3292..000000000
--- a/src/main/java/db/migration/V1_8__MigrateConfig.java
+++ /dev/null
@@ -1,203 +0,0 @@
-package db.migration;
-
-/*-
- *
- * Hedera Mirror Node
- *
- * Copyright (C) 2019 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import org.flywaydb.core.api.migration.BaseJavaMigration;
-import org.flywaydb.core.api.migration.Context;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonIOException;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-import com.google.gson.JsonSyntaxException;
-import com.hedera.databaseUtilities.ApplicationStatus;
-
-import lombok.extern.log4j.Log4j2;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.sql.PreparedStatement;
-
-@Log4j2
-public class V1_8__MigrateConfig extends BaseJavaMigration {
- public void migrate(Context context) throws Exception {
-
- String configSavePath = "./config/config.json";
- String balanceSavePath = "./config/balance.json";
- String recordsSavePath = "./config/records.json";
- String eventsSavePath = "./config/events.json";
- String loggerSavePath = "./config/loggerStatus.json";
-
- JsonObject configJsonObject;
- JsonObject balanceJsonObject;
- JsonObject recordsJsonObject;
- JsonObject eventsJsonObject;
- JsonObject loggerJsonObject;
-
- String stopLoggingIfRecordHashMismatchAfter = "";
- String stopLoggingIfEventHashMismatchAfter = "";
- String lastValidBalanceFileName = "";
- String lastValidRcdFileName = "";
- String lastValidRcdFileHash = "";
- String lastValidEventFileName = "";
- String lastValidEventFileHash = "";
- String lastProcessedRcdHash = "";
- String lastProcessedEventHash = "";
-
- configJsonObject = getJsonObject(configSavePath);
-
- if (configJsonObject.has("stopLoggingIfRecordHashMismatch")) {
- stopLoggingIfRecordHashMismatchAfter = configJsonObject.get("stopLoggingIfRecordHashMismatch").getAsString();
- }
-
- if (configJsonObject.has("stopLoggingIfEventHashMismatch")) {
- stopLoggingIfEventHashMismatchAfter = configJsonObject.get("stopLoggingIfEventHashMismatch").getAsString();
- }
- File balanceFile = new File(balanceSavePath);
- if (balanceFile.exists()) {
- balanceJsonObject = getJsonObject(balanceSavePath);
- if (balanceJsonObject.has("lastValidBalanceFileName")) {
- lastValidBalanceFileName = balanceJsonObject.get("lastValidBalanceFileName").getAsString();
- }
- } else if (configJsonObject.has("lastValidBalanceFileName")) {
- lastValidBalanceFileName = configJsonObject.get("lastValidBalanceFileName").getAsString();
- }
-
- File recordFile = new File(recordsSavePath);
- if (recordFile.exists()) {
- recordsJsonObject = getJsonObject(recordsSavePath);
- if (recordsJsonObject.has("lastValidRcdFileName")) {
- lastValidRcdFileName = recordsJsonObject.get("lastValidRcdFileName").getAsString();
- }
- if (recordsJsonObject.has("lastValidRcdFileHash")) {
- lastValidRcdFileHash = recordsJsonObject.get("lastValidRcdFileHash").getAsString();
- }
- } else if ((configJsonObject.has("lastValidRcdFileName")) || (configJsonObject.has("lastValidRcdFileHash"))) {
- if (configJsonObject.has("lastValidRcdFileName")) {
- lastValidRcdFileName = configJsonObject.get("lastValidRcdFileName").getAsString();
- }
- if (configJsonObject.has("lastValidRcdFileHash")) {
- lastValidRcdFileHash = configJsonObject.get("lastValidRcdFileHash").getAsString();
- }
- }
-
- File eventFile = new File(eventsSavePath);
- if (eventFile.exists()) {
- eventsJsonObject = getJsonObject(eventsSavePath);
- if (eventsJsonObject.has("lastValidEventFileName")) {
- lastValidEventFileName = eventsJsonObject.get("lastValidEventFileName").getAsString();
- }
- if (eventsJsonObject.has("lastValidEventFileHash")) {
- lastValidEventFileHash = eventsJsonObject.get("lastValidEventFileHash").getAsString();
- }
- }
-
- File loggerFile = new File (loggerSavePath);
-
- if (loggerFile.exists()) {
- loggerJsonObject = getJsonObject(loggerSavePath);
-
- if (loggerJsonObject.has("lastProcessedRcdHash")) {
- lastProcessedRcdHash = loggerJsonObject.get("lastProcessedRcdHash").getAsString();
- }
- if (loggerJsonObject.has("lastProcessedEventHash")) {
- lastProcessedEventHash = loggerJsonObject.get("lastProcessedEventHash").getAsString();
- }
- }
-
- try (PreparedStatement updateValue = context.getConnection().prepareStatement(
- "UPDATE t_application_status SET "
- + " status_value = ? "
- + " WHERE status_code = ?")) {
- updateValue.setString(1, stopLoggingIfEventHashMismatchAfter);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.EVENT_HASH_MISMATCH_BYPASS_UNTIL_AFTER.name());
- updateValue.execute();
- updateValue.setString(1, stopLoggingIfRecordHashMismatchAfter);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.RECORD_HASH_MISMATCH_BYPASS_UNTIL_AFTER.name());
- updateValue.execute();
- updateValue.setString(1, lastValidBalanceFileName);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.LAST_VALID_DOWNLOADED_BALANCE_FILE.name());
- updateValue.execute();
- updateValue.setString(1, lastValidRcdFileName);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.LAST_VALID_DOWNLOADED_RECORD_FILE.name());
- updateValue.execute();
- updateValue.setString(1, lastValidRcdFileHash);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.LAST_VALID_DOWNLOADED_RECORD_FILE_HASH.name());
- updateValue.execute();
- updateValue.setString(1, lastValidEventFileName);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.LAST_VALID_DOWNLOADED_EVENT_FILE.name());
- updateValue.execute();
- updateValue.setString(1, lastValidEventFileHash);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.LAST_VALID_DOWNLOADED_EVENT_FILE_HASH.name());
- updateValue.execute();
- updateValue.setString(1, lastProcessedRcdHash);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.LAST_PROCESSED_RECORD_HASH.name());
- updateValue.execute();
- updateValue.setString(1, lastProcessedEventHash);
- updateValue.setString(2, ApplicationStatus.ApplicationStatusCode.LAST_PROCESSED_EVENT_HASH.name());
- updateValue.execute();
- }
-
- // remove from config file
- configJsonObject.remove("lastValidRcdFileName");
- configJsonObject.remove("lastValidRcdFileHash");
- configJsonObject.remove("stopLoggingIfRecordHashMismatch");
- configJsonObject.remove("stopLoggingIfEventHashMismatch");
- configJsonObject.remove("lastValidBalanceFileName");
-
- try (FileWriter configFile = new FileWriter(configSavePath)) {
- Gson gson = new GsonBuilder().setPrettyPrinting().create();
- gson.toJson(configJsonObject, configFile);
- log.debug("Successfully wrote update to {}", configSavePath);
- } catch (IOException ex) {
- log.error("Fail to write update to {}", configSavePath, ex);
- throw ex;
- }
-
- if (balanceFile.exists()) {
- balanceFile.delete();
- }
- if (recordFile.exists()) {
- recordFile.delete();
- }
- if (eventFile.exists()) {
- eventFile.delete();
- }
- if (loggerFile.exists()) {
- loggerFile.delete();
- }
- }
-
- private JsonObject getJsonObject(
- final String location) throws JsonIOException, JsonSyntaxException, FileNotFoundException {
-
- final JsonParser parser = new JsonParser();
-
- // Read file into object
- final FileReader file = new FileReader(location);
- return (JsonObject) parser.parse(file);
- }
-
-} | ['src/main/java/db/migration/V1_8__MigrateConfig.java', 'src/main/java/com/hedera/databaseUtilities/DatabaseUtilities.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 264,090 | 61,221 | 7,598 | 32 | 8,948 | 1,940 | 213 | 2 | 1,375 | 192 | 308 | 27 | 0 | 0 | 1970-01-01T00:26:07 | 103 | Java | {'Java': 8228904, 'JavaScript': 1607027, 'Go': 582284, 'Kotlin': 65844, 'PLpgSQL': 50429, 'Solidity': 45435, 'Gherkin': 33509, 'Shell': 27165, 'Mustache': 22158, 'Dockerfile': 17413, 'HTML': 1783, 'Python': 1445, 'CSS': 1425} | Apache License 2.0 |
1,181 | adyen/adyen-android/1242/1234 | adyen | adyen-android | https://github.com/Adyen/adyen-android/issues/1234 | https://github.com/Adyen/adyen-android/pull/1242 | https://github.com/Adyen/adyen-android/pull/1242 | 1 | fixes | Crash on AwaitView when DNKA is active | **Describe the bug**
Application Crashes when trying to pay with MBWay with DNKA (Do Not Keep Activities) active, if the user goes to background and then to foreground again.
This is problematic because MBWay will force the user to navigate to a different app to confirm the payment, which can increase the probability of this issue to occur in production.
**To Reproduce**
Steps to reproduce the behavior:
1. Make sure the device is with DNKA active
2. Start a payment with MBWay
3. While the payment is awaiting confirmation (see screenshot) go to background
4. Bring the app again back to foreground
5. Crash happens
**Expected behavior**
When returning to the app, the DropIn would be restored with the AwaitView and the process would conclude as expected.
**Screenshots**
<img src="https://github.com/Adyen/adyen-android/assets/19568050/84de811b-9120-4524-9339-07131048e6a2" width="200" height="400" />
**Smartphone (please complete the following information):**
- Device: Any
- OS: Any
- Version: 4.12.0
**Additional context**
Stacktrace:
```
java.lang.NullPointerException:
Attempt to invoke virtual method 'int java.lang.String.hashCode()' on a null object reference
at com.adyen.checkout.await.AwaitView.getMessageTextResource(AwaitView.java:126)
at com.adyen.checkout.await.AwaitView.updateMessageText(AwaitView.java:118)
at com.adyen.checkout.await.AwaitView.onChanged(AwaitView.java:95)
at com.adyen.checkout.await.AwaitView.onChanged(AwaitView.java:32)
at androidx.lifecycle.LiveData.considerNotify(LiveData.java:133)
at androidx.lifecycle.LiveData.dispatchingValue(LiveData.java:146)
at androidx.lifecycle.LiveData$ObserverWrapper.activeStateChanged(LiveData.java:468)
at androidx.lifecycle.LiveData$LifecycleBoundObserver.onStateChanged(LiveData.java:425)
at androidx.lifecycle.LifecycleRegistry$ObserverWithState.dispatchEvent(LifecycleRegistry.java:360)
at androidx.lifecycle.LifecycleRegistry.forwardPass(LifecycleRegistry.java:271)
at androidx.lifecycle.LifecycleRegistry.sync(LifecycleRegistry.java:313)
at androidx.lifecycle.LifecycleRegistry.moveToState(LifecycleRegistry.java:151)
at androidx.lifecycle.LifecycleRegistry.handleLifecycleEvent(LifecycleRegistry.java:134)
at androidx.fragment.app.FragmentViewLifecycleOwner.handleLifecycleEvent(FragmentViewLifecycleOwner.java:93)
at androidx.fragment.app.Fragment.performStart(Fragment.java:3169)
at androidx.fragment.app.FragmentStateManager.start(FragmentStateManager.java:588)
at androidx.fragment.app.FragmentStateManager.moveToExpectedState(FragmentStateManager.java:279)
at androidx.fragment.app.FragmentStore.moveToExpectedState(FragmentStore.java:113)
at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1433)
at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2977)
at androidx.fragment.app.FragmentManager.dispatchStart(FragmentManager.java:2902)
at androidx.fragment.app.FragmentController.dispatchStart(FragmentController.java:274)
at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:359)
at androidx.appcompat.app.AppCompatActivity.onStart(AppCompatActivity.java:248)
at com.adyen.checkout.dropin.ui.DropInActivity.onStart(DropInActivity.kt:248)
at android.app.Instrumentation.callActivityOnStart(Instrumentation.java:1455)
at android.app.Activity.performStart(Activity.java:8076)
at android.app.ActivityThread.handleStartActivity(ActivityThread.java:3660)
at android.app.servertransaction.TransactionExecutor.performLifecycleSequence(TransactionExecutor.java:221)
at android.app.servertransaction.TransactionExecutor.cycleToPath(TransactionExecutor.java:201)
at android.app.servertransaction.TransactionExecutor.executeLifecycleState(TransactionExecutor.java:173)
at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:97)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:2210)
at android.os.Handler.dispatchMessage(Handler.java:106)
at android.os.Looper.loopOnce(Looper.java:201)
at android.os.Looper.loop(Looper.java:288)
at android.app.ActivityThread.main(ActivityThread.java:7839)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:548)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1003)
```
| c2601443d8288a1cd4cfcc315a3a3644f2602c63 | 6ed7e3b4601b4822fc54a4ced0327c95f7f3a927 | https://github.com/adyen/adyen-android/compare/c2601443d8288a1cd4cfcc315a3a3644f2602c63...6ed7e3b4601b4822fc54a4ced0327c95f7f3a927 | diff --git a/await/src/main/java/com/adyen/checkout/await/AwaitView.java b/await/src/main/java/com/adyen/checkout/await/AwaitView.java
index 9c305fc26..73a09d0e5 100644
--- a/await/src/main/java/com/adyen/checkout/await/AwaitView.java
+++ b/await/src/main/java/com/adyen/checkout/await/AwaitView.java
@@ -123,6 +123,10 @@ public class AwaitView extends AdyenLinearLayout<AwaitOutputData, AwaitConfigura
@StringRes
private Integer getMessageTextResource() {
+ if (mPaymentMethodType == null) {
+ return null;
+ }
+
switch (mPaymentMethodType) {
case PaymentMethodTypes.BLIK:
return R.string.checkout_await_message_blik; | ['await/src/main/java/com/adyen/checkout/await/AwaitView.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 579,687 | 119,249 | 17,138 | 189 | 81 | 17 | 4 | 1 | 5,095 | 247 | 960 | 71 | 1 | 1 | 1970-01-01T00:28:07 | 100 | Kotlin | {'Kotlin': 3298243, 'Java': 600315, 'Shell': 2967} | MIT License |
3,602 | azure/azure-sdk-tools/388/384 | azure | azure-sdk-tools | https://github.com/Azure/azure-sdk-tools/issues/384 | https://github.com/Azure/azure-sdk-tools/pull/388 | https://github.com/Azure/azure-sdk-tools/pull/388 | 2 | fixes | [Bug-Java] Nested interface is not shown. | The nested interface is not shown here, but it should be shown because it is an interface defined in an interface. Anything in an interface is public by default.
For example, https://apiview.dev/Assemblies/Review/bc9e446581a4475ca4df0a011c152da7
The Azure search API view doesn't show the inner nested interface [BuilderDefinition](https://github.com/Azure/azure-sdk-for-java/blob/master/sdk/cosmos/microsoft-azure-cosmos/src/main/java/com/azure/data/cosmos/ChangeFeedProcessor.java#L79) within the public interface ChangeFeedProcessor.
| 478a6f039e84e985df691047a2794681c29037b2 | 6eb476d214a6a850967e5588585dc78f07609eaf | https://github.com/azure/azure-sdk-tools/compare/478a6f039e84e985df691047a2794681c29037b2...6eb476d214a6a850967e5588585dc78f07609eaf | diff --git a/src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/ASTAnalyser.java b/src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/ASTAnalyser.java
index 5eb15d32..3ef55df7 100644
--- a/src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/ASTAnalyser.java
+++ b/src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/ASTAnalyser.java
@@ -1,5 +1,6 @@
package com.azure.tools.apiview.processor.analysers;
+import com.azure.tools.apiview.processor.analysers.util.ASTUtils;
import com.azure.tools.apiview.processor.analysers.util.SourceJarTypeSolver;
import com.azure.tools.apiview.processor.diagnostics.Diagnostics;
import com.github.javaparser.StaticJavaParser;
@@ -54,7 +55,6 @@ import static com.azure.tools.apiview.processor.model.TokenKind.*;
import static com.azure.tools.apiview.processor.analysers.util.ASTUtils.*;
public class ASTAnalyser implements Analyser {
- private final File inputFile;
private final APIListing apiListing;
private final Map<String, ChildItem> packageNameToNav;
@@ -62,7 +62,6 @@ public class ASTAnalyser implements Analyser {
private int indent;
public ASTAnalyser(File inputFile, APIListing apiListing) {
- this.inputFile = inputFile;
this.apiListing = apiListing;
this.indent = 0;
this.packageNameToNav = new HashMap<>();
@@ -78,20 +77,17 @@ public class ASTAnalyser implements Analyser {
else if (inputFileName.contains("implementation")) return false;
else if (inputFileName.contains("package-info.java")) return false;
else if (inputFileName.contains("module-info.java")) return false;
- else if (!inputFileName.endsWith(".java")) return false;
- else return true;
+ else return inputFileName.endsWith(".java");
}).collect(Collectors.toList());
// then we do a pass to build a map of all known types and package names, and a map of package names to nav items,
// followed by a pass to tokenise each file
allFiles.stream()
.map(this::scanForTypes)
- .collect(Collectors.toList())
- .stream()
.filter(Optional::isPresent)
.map(Optional::get)
- .sorted((s1, s2) -> s1.path.compareTo(s2.path))
- .forEach(scanClass -> processSingleFile(scanClass));
+ .sorted(Comparator.comparing(s -> s.path))
+ .forEach(this::processSingleFile);
// build the navigation
packageNameToNav.values().stream()
@@ -115,7 +111,6 @@ public class ASTAnalyser implements Analyser {
// Set up a minimal type solver that only looks at the classes used to run this sample.
CombinedTypeSolver combinedTypeSolver = new CombinedTypeSolver();
combinedTypeSolver.add(new ReflectionTypeSolver());
-// combinedTypeSolver.add(new SourceJarTypeSolver(inputFile));
// Configure JavaParser to use type resolution
StaticJavaParser.getConfiguration()
@@ -252,8 +247,8 @@ public class ASTAnalyser implements Analyser {
}
private boolean getTypeDeclaration(TypeDeclaration<?> typeDeclaration) {
- // Skip if the class is private or package-private
- if (isPrivateOrPackagePrivate(typeDeclaration.getAccessSpecifier())) {
+ // Skip if the class is private or package-private, unless it is a nested type defined inside a public interface
+ if (!isTypeAPublicAPI(typeDeclaration)) {
return true;
}
@@ -676,16 +671,21 @@ public class ASTAnalyser implements Analyser {
.forEach(name -> name.getQualifier().ifPresent(packageName -> {
apiListing.addPackageTypeMapping(packageName.toString(), name.getIdentifier());
}));
- }
}
+ }
+ /*
+ * This method is only called in relation to building up the types for linking, it does not build up the actual
+ * text output that is displayed to the user.
+ */
private void getTypeDeclaration(TypeDeclaration<?> typeDeclaration) {
- // Skip if the class is private or package-private
- if (isPrivateOrPackagePrivate(typeDeclaration.getAccessSpecifier())) {
+ // Skip if the class is private or package-private, unless it is a nested type defined inside a public interface
+ if (!isTypeAPublicAPI(typeDeclaration)) {
return;
}
- if (! (typeDeclaration.isClassOrInterfaceDeclaration() || typeDeclaration.isEnumDeclaration())) {
+ final boolean isInterfaceType = typeDeclaration.isClassOrInterfaceDeclaration();
+ if (! (isInterfaceType || typeDeclaration.isEnumDeclaration())) {
return;
}
diff --git a/src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/util/ASTUtils.java b/src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/util/ASTUtils.java
index 836b3d58..da098d66 100644
--- a/src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/util/ASTUtils.java
+++ b/src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/util/ASTUtils.java
@@ -88,4 +88,30 @@ public class ASTUtils {
return "";
}
}
+
+ /**
+ * Returns true if the type is public or protected, or it the type is an interface that is defined within another
+ * public interface.
+ */
+ public static boolean isTypeAPublicAPI(TypeDeclaration type) {
+ final boolean isInterfaceType = type.isClassOrInterfaceDeclaration();
+ final boolean isNestedType = type.isNestedType();
+
+ // Skip if the class is private or package-private, unless it is a nested type defined inside a public interface
+ if (isPrivateOrPackagePrivate(type.getAccessSpecifier())) {
+ if (isNestedType && isInterfaceType) {
+ boolean isInPublicParent = type.getParentNode()
+ .filter(parentNode -> parentNode instanceof ClassOrInterfaceDeclaration)
+ .map(parentNode -> isPublicOrProtected(((ClassOrInterfaceDeclaration)parentNode).getAccessSpecifier()))
+ .orElse(false);
+
+ if (! isInPublicParent) {
+ return false;
+ }
+ } else {
+ return false;
+ }
+ }
+ return true;
+ }
} | ['src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/util/ASTUtils.java', 'src/java/apiview-java-processor/src/main/java/com/azure/tools/apiview/processor/analysers/ASTAnalyser.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 63,760 | 12,190 | 1,679 | 18 | 2,975 | 562 | 56 | 2 | 543 | 49 | 134 | 5 | 2 | 0 | 1970-01-01T00:26:21 | 100 | C# | {'C#': 3648314, 'TypeScript': 1010241, 'Python': 790921, 'PowerShell': 770752, 'Java': 312306, 'C++': 185881, 'HTML': 176466, 'Swift': 137428, 'Go': 66983, 'JavaScript': 65011, 'Bicep': 46692, 'SCSS': 39650, 'CMake': 26255, 'Nunjucks': 19830, 'Shell': 13241, 'Smarty': 11608, 'CSS': 6089, 'Dockerfile': 5601, 'Batchfile': 2474, 'Objective-C': 510} | MIT License |
946 | jdi-testing/jdi-light/1506/1307 | jdi-testing | jdi-light | https://github.com/jdi-testing/jdi-light/issues/1307 | https://github.com/jdi-testing/jdi-light/pull/1506 | https://github.com/jdi-testing/jdi-light/pull/1506 | 1 | fix | Fix form methods() for forms that have more than one button on it | Could be checked in these branch
https://github.com/jdi-testing/jdi-light/tree/forms_methods_check
Here is the test for it
BootstrapFormTests.java:51 | c2d5ab6d4924efb8f9a4811b64c604d9ed1b56e6 | f04441d8d76425b42e22adb41629fb3941d3e5a8 | https://github.com/jdi-testing/jdi-light/compare/c2d5ab6d4924efb8f9a4811b64c604d9ed1b56e6...f04441d8d76425b42e22adb41629fb3941d3e5a8 | diff --git a/jdi-light-bootstrap-tests/src/main/java/io/github/com/sections/form/SupportMessageForm.java b/jdi-light-bootstrap-tests/src/main/java/io/github/com/sections/form/SupportMessageForm.java
index 9d6831694..e8c9e9cb4 100644
--- a/jdi-light-bootstrap-tests/src/main/java/io/github/com/sections/form/SupportMessageForm.java
+++ b/jdi-light-bootstrap-tests/src/main/java/io/github/com/sections/form/SupportMessageForm.java
@@ -1,7 +1,7 @@
package io.github.com.sections.form;
-import com.epam.jdi.light.ui.html.elements.common.Button;
import com.epam.jdi.light.ui.bootstrap.elements.composite.Form;
+import com.epam.jdi.light.ui.html.elements.common.Button;
import com.epam.jdi.light.ui.html.elements.common.TextArea;
import com.epam.jdi.light.ui.html.elements.common.TextField;
import io.github.com.entities.SupportMessage;
diff --git a/jdi-light-bootstrap-tests/src/test/java/io/github/epam/bootstrap/tests/composite/section/form/SimpleFormTests.java b/jdi-light-bootstrap-tests/src/test/java/io/github/epam/bootstrap/tests/composite/section/form/SimpleFormTests.java
index 659772c90..23c4db3cb 100644
--- a/jdi-light-bootstrap-tests/src/test/java/io/github/epam/bootstrap/tests/composite/section/form/SimpleFormTests.java
+++ b/jdi-light-bootstrap-tests/src/test/java/io/github/epam/bootstrap/tests/composite/section/form/SimpleFormTests.java
@@ -41,7 +41,6 @@ public class SimpleFormTests extends TestsInit {
setDefaultValues();
supportMessageForm.submit(EXAMPLE_MESSAGE);
lastLogEntry.has().text(containsString(logLineSubmit));
- supportMessageForm.check(EXAMPLE_MESSAGE);
}
@Test
@@ -53,13 +52,11 @@ public class SimpleFormTests extends TestsInit {
supportMessageForm.check(EXAMPLE_MESSAGE);
}
- // 05.12.19: Test will be failed. Reason: Button "Clear" is not working now in UI from https://jdi-testing.github.io/jdi-light/bootstrap_forms.html
@Test
public void clearFormTest() {
setDefaultValues();
supportMessageForm.clear(EXAMPLE_MESSAGE);
lastLogEntry.has().text(containsString(logLineClear));
- supportMessageForm.check(TEMPLATE_MESSAGE);
}
@Test
diff --git a/jdi-light/src/main/java/com/epam/jdi/light/common/UIUtils.java b/jdi-light/src/main/java/com/epam/jdi/light/common/UIUtils.java
index f1c24ac0f..87adc9a9b 100644
--- a/jdi-light/src/main/java/com/epam/jdi/light/common/UIUtils.java
+++ b/jdi-light/src/main/java/com/epam/jdi/light/common/UIUtils.java
@@ -60,6 +60,9 @@ public final class UIUtils {
Collection<IClickable> buttons = select(fields,
f -> (IClickable) getValueField(f, obj));
IClickable button = first(buttons, b -> namesEqual(toButton(((HasValue) b).getValue()), toButton(buttonName)));
+ if (button == null) {
+ button = first(buttons, b -> namesEqual(toButton(((INamed) b).getName()), toButton(buttonName)));
+ }
if (button != null)
return button;
} | ['jdi-light/src/main/java/com/epam/jdi/light/common/UIUtils.java', 'jdi-light-bootstrap-tests/src/main/java/io/github/com/sections/form/SupportMessageForm.java', 'jdi-light-bootstrap-tests/src/test/java/io/github/epam/bootstrap/tests/composite/section/form/SimpleFormTests.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 654,884 | 151,229 | 18,629 | 312 | 176 | 36 | 3 | 1 | 154 | 14 | 35 | 5 | 1 | 0 | 1970-01-01T00:26:15 | 98 | Java | {'Java': 5910794, 'Gherkin': 47856, 'Shell': 14702, 'XSLT': 6290, 'Batchfile': 79} | MIT License |
9,363 | eclipse-vertx/vertx-codegen/193/192 | eclipse-vertx | vertx-codegen | https://github.com/eclipse-vertx/vertx-codegen/issues/192 | https://github.com/eclipse-vertx/vertx-codegen/pull/193 | https://github.com/eclipse-vertx/vertx-codegen/pull/193 | 1 | fixes | IndexOutOfBoundsException when processing javadocs | codegen will crash while processing javadocs that are:
```
/**
* @deprecated
* @see #handler(RoutingContext ctx)
* @param ctx the context
*/
``` | 4ac5bb3f00199619d607959caac1ab799f427f6b | 94cb4c7e572c222c219e676c709c7fa579fd0c82 | https://github.com/eclipse-vertx/vertx-codegen/compare/4ac5bb3f00199619d607959caac1ab799f427f6b...94cb4c7e572c222c219e676c709c7fa579fd0c82 | diff --git a/src/main/java/io/vertx/codegen/doc/Doc.java b/src/main/java/io/vertx/codegen/doc/Doc.java
index 0daa8c1d..f0c03fd1 100644
--- a/src/main/java/io/vertx/codegen/doc/Doc.java
+++ b/src/main/java/io/vertx/codegen/doc/Doc.java
@@ -49,7 +49,13 @@ public class Doc {
String name = matcher.group(2);
int prev = matcher.end() + 1;
if (matcher.find()) {
- blockTags.add(new Tag(name, javadoc.substring(prev, matcher.start())));
+ int start = matcher.start();
+ if (start <= prev) {
+ // this is a tag without content (e.g.: deprecated)
+ blockTags.add(new Tag(name, ""));
+ } else {
+ blockTags.add(new Tag(name, javadoc.substring(prev, start)));
+ }
} else {
blockTags.add(new Tag(name, javadoc.substring(prev)));
break;
diff --git a/src/test/java/io/vertx/test/codegen/DocTest.java b/src/test/java/io/vertx/test/codegen/DocTest.java
index b8209456..e6701b3d 100644
--- a/src/test/java/io/vertx/test/codegen/DocTest.java
+++ b/src/test/java/io/vertx/test/codegen/DocTest.java
@@ -50,6 +50,8 @@ public class DocTest {
assertComment("@tag1 value", "", null, new Tag("tag1", "value"));
assertComment("\\n@tag1 value", "", null, new Tag("tag1", "value"));
assertComment("@tag1 value1\\n@tag2 value2", "", null, new Tag("tag1", "value1"), new Tag("tag2", "value2"));
+
+ assertComment("@deprecated\\n@see #handler(RountingContext)", "", null, new Tag("deprecated", ""), new Tag("see", "#handler(RountingContext)"));
}
private void assertComment(String text, String expectedFirstSentence, String expectedBody, Tag... expectedBlockTags) { | ['src/test/java/io/vertx/test/codegen/DocTest.java', 'src/main/java/io/vertx/codegen/doc/Doc.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 559,943 | 127,707 | 16,330 | 129 | 374 | 79 | 8 | 1 | 158 | 23 | 38 | 9 | 0 | 1 | 1970-01-01T00:25:29 | 98 | Java | {'Java': 1488211} | Apache License 2.0 |
243 | inl/blacklab/237/236 | inl | blacklab | https://github.com/INL/BlackLab/issues/236 | https://github.com/INL/BlackLab/pull/237 | https://github.com/INL/BlackLab/pull/237#issuecomment-994888174 | 1 | fixes | Retrieving hit group contents no longer works | It seems that since [this commit](https://github.com/INL/BlackLab/commit/fa8720487f8aee79ad8bd600ae98173e04abc4ec) BlackLab always launches a hitsWindow() search, even when there is a groupBy and viewGroup parameter, causing them to be ignored and group contents requests to return the entire (ungrouped) source hits.
| 1fe8377706234f5b46b9f85b97fa892ed2a418fc | 51971210049568bcc1c3c7421b5a34faf09b8fd3 | https://github.com/inl/blacklab/compare/1fe8377706234f5b46b9f85b97fa892ed2a418fc...51971210049568bcc1c3c7421b5a34faf09b8fd3 | diff --git a/server/src/main/java/nl/inl/blacklab/server/requesthandlers/RequestHandlerHits.java b/server/src/main/java/nl/inl/blacklab/server/requesthandlers/RequestHandlerHits.java
index f36cfa4b..e5283a86 100644
--- a/server/src/main/java/nl/inl/blacklab/server/requesthandlers/RequestHandlerHits.java
+++ b/server/src/main/java/nl/inl/blacklab/server/requesthandlers/RequestHandlerHits.java
@@ -103,8 +103,10 @@ public class RequestHandlerHits extends RequestHandler {
ResultsStats hitsCount;
ResultsStats docsCount;
+ boolean viewingGroup = groupBy.length() > 0 && viewGroup.length() > 0;
try {
- if (groupBy.length() > 0 && viewGroup.length() > 0) {
+ if (viewingGroup) {
+ // We're viewing a single group. Get the hits from the grouping results.
Pair<BlsCacheEntry<?>, Hits> res = getHitsFromGroup(groupBy, viewGroup);
cacheEntry = res.getLeft();
hits = res.getRight();
@@ -112,6 +114,7 @@ public class RequestHandlerHits extends RequestHandler {
hitsCount = hits.hitsStats();
docsCount = hits.docsStats();
} else {
+ // Regular hits request. Start the search.
cacheEntry = (BlsCacheEntry<ResultCount>)searchParam.hitsCount().executeAsync(); // always launch totals nonblocking!
hits = searchParam.hitsSample().execute();
hitsCount = ((BlsCacheEntry<ResultCount>)cacheEntry).get();
@@ -136,14 +139,20 @@ public class RequestHandlerHits extends RequestHandler {
if (!hits.hitsStats().processedAtLeast(windowSettings.first()))
throw new BadRequest("HIT_NUMBER_OUT_OF_RANGE", "Non-existent hit number specified.");
- // Request the window of hits we're interested in.
- // (we hold on to the cache entry so that we can differentiate between search and count time later)
- BlsCacheEntry<Hits> cacheEntryWindow = (BlsCacheEntry<Hits>)searchParam.hitsWindow().executeAsync();
+ BlsCacheEntry<Hits> cacheEntryWindow = null;
Hits window;
- try {
- window = cacheEntryWindow.get(); // blocks until requested hits window is available
- } catch (InterruptedException | ExecutionException e) {
- throw RequestHandler.translateSearchException(e);
+ if (!viewingGroup) {
+ // Request the window of hits we're interested in.
+ // (we hold on to the cache entry so that we can differentiate between search and count time later)
+ cacheEntryWindow = (BlsCacheEntry<Hits>) searchParam.hitsWindow().executeAsync();
+ try {
+ window = cacheEntryWindow.get(); // blocks until requested hits window is available
+ } catch (InterruptedException | ExecutionException e) {
+ throw RequestHandler.translateSearchException(e);
+ }
+ } else {
+ // We're viewing a single group in a grouping result. Just get the hits window directly.
+ window = hits.window(windowSettings.first(), windowSettings.size());
}
DocResults perDocResults = null;
@@ -174,7 +183,7 @@ public class RequestHandlerHits extends RequestHandler {
ds.startEntry("summary").startMap();
// Search time should be time user (originally) had to wait for the response to this request.
// Count time is the time it took (or is taking) to iterate through all the results to count the total.
- long searchTime = cacheEntryWindow.timeUserWaitedMs() + kwicTimeMs;
+ long searchTime = (cacheEntryWindow == null ? cacheEntry.timeUserWaitedMs() : cacheEntryWindow.timeUserWaitedMs()) + kwicTimeMs;
long countTime = cacheEntry.threwException() ? -1 : cacheEntry.timeUserWaitedMs();
logger.info("Total search time is:{} ms", searchTime);
addSummaryCommonFields(ds, searchParam, searchTime, countTime, null, window.windowStats()); | ['server/src/main/java/nl/inl/blacklab/server/requesthandlers/RequestHandlerHits.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,518,144 | 740,705 | 99,981 | 600 | 1,892 | 389 | 27 | 1 | 319 | 37 | 85 | 2 | 1 | 0 | 1970-01-01T00:27:19 | 85 | Java | {'Java': 4301847, 'JavaScript': 55674, 'C': 19605, 'HTML': 7491, 'Makefile': 7075, 'Shell': 5159, 'Dockerfile': 4470, 'CSS': 1217, 'Stylus': 1142, 'XSLT': 311} | Apache License 2.0 |
353 | hazelcast/hazelcast-simulator/303/292 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/292 | https://github.com/hazelcast/hazelcast-simulator/pull/303 | https://github.com/hazelcast/hazelcast-simulator/pull/303 | 1 | fix | Stabilizer not Failing, error go unreported | Due to a simple error in test code I found this in worker.log
```
grepall worker.log iCacheCas
./worker-10.184.184.149-1-server/worker.log
INFO 2014-09-11 12:48:24,900 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Initializing test iCacheCas ---------------------------
id=iCacheCas
INFO 2014-09-11 12:48:26,081 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.setup() ------------------------------------
FATAL 2014-09-11 12:48:26,081 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.setup() ---------------------------
INFO 2014-09-11 12:48:28,098 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localWarmup() ------------------------------------
INFO 2014-09-11 12:48:28,098 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.localWarmup() ---------------------------
INFO 2014-09-11 12:48:32,115 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Skipping iCacheCas.run(); member is passive ------------------------------------
INFO 2014-09-11 12:50:33,277 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- iCacheCas.stop() ------------------------------------
INFO 2014-09-11 12:50:36,310 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localVerify() ------------------------------------
INFO 2014-09-11 12:50:36,310 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.localVerify() ---------------------------
INFO 2014-09-11 12:50:41,327 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localTeardown() ------------------------------------
FATAL 2014-09-11 12:50:41,327 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.localTeardown() ---------------------------
./worker-10.67.172.55-2-client/worker.log
INFO 2014-09-11 12:48:25,439 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Initializing test iCacheCas ---------------------------
id=iCacheCas
INFO 2014-09-11 12:48:26,623 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.setup() ------------------------------------
FATAL 2014-09-11 12:48:26,715 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.setup() ---------------------------
INFO 2014-09-11 12:48:28,701 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localWarmup() ------------------------------------
INFO 2014-09-11 12:48:28,702 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.localWarmup() ---------------------------
INFO 2014-09-11 12:48:32,717 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.run() ------------------------------------
FATAL 2014-09-11 12:48:32,717 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed to execute iCacheCas.run() ------------------------------------
INFO 2014-09-11 12:50:33,875 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- iCacheCas.stop() ------------------------------------
INFO 2014-09-11 12:50:36,889 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localVerify() ------------------------------------
INFO 2014-09-11 12:50:36,889 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.localVerify() ---------------------------
INFO 2014-09-11 12:50:41,916 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localTeardown() ------------------------------------
FATAL 2014-09-11 12:50:41,916 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.localTeardown() ---------------------------
./worker-10.184.184.149-2-client/worker.log
INFO 2014-09-11 12:48:25,409 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Initializing test iCacheCas ---------------------------
id=iCacheCas
INFO 2014-09-11 12:48:26,578 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.setup() ------------------------------------
FATAL 2014-09-11 12:48:26,689 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.setup() ---------------------------
INFO 2014-09-11 12:48:28,590 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localWarmup() ------------------------------------
INFO 2014-09-11 12:48:28,590 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.localWarmup() ---------------------------
INFO 2014-09-11 12:48:32,608 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.run() ------------------------------------
FATAL 2014-09-11 12:48:32,609 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed to execute iCacheCas.run() ------------------------------------
INFO 2014-09-11 12:50:33,769 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- iCacheCas.stop() ------------------------------------
INFO 2014-09-11 12:50:36,795 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localVerify() ------------------------------------
INFO 2014-09-11 12:50:36,795 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.localVerify() ---------------------------
INFO 2014-09-11 12:50:41,821 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localTeardown() ------------------------------------
FATAL 2014-09-11 12:50:41,821 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.localTeardown() ---------------------------
./worker-10.67.172.55-1-server/worker.log
INFO 2014-09-11 12:48:25,857 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Initializing test iCacheCas ---------------------------
id=iCacheCas
INFO 2014-09-11 12:48:27,000 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.setup() ------------------------------------
FATAL 2014-09-11 12:48:27,000 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.setup() ---------------------------
INFO 2014-09-11 12:48:29,020 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localWarmup() ------------------------------------
INFO 2014-09-11 12:48:29,020 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.localWarmup() ---------------------------
INFO 2014-09-11 12:48:31,028 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.globalWarmup() ------------------------------------
FATAL 2014-09-11 12:48:31,044 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.globalWarmup() ---------------------------
INFO 2014-09-11 12:48:33,037 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Skipping iCacheCas.run(); member is passive ------------------------------------
INFO 2014-09-11 12:50:33,199 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- iCacheCas.stop() ------------------------------------
INFO 2014-09-11 12:50:35,213 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.globalVerify() ------------------------------------
FATAL 2014-09-11 12:50:35,213 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.globalVerify() ---------------------------
INFO 2014-09-11 12:50:37,223 [Thread-7] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localVerify() ------------------------------------
INFO 2014-09-11 12:50:37,224 [Thread-7] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.localVerify() ---------------------------
INFO 2014-09-11 12:50:39,235 [Thread-8] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.globalTeardown() ------------------------------------
INFO 2014-09-11 12:50:39,235 [Thread-8] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished iCacheCas.globalTeardown() ---------------------------
INFO 2014-09-11 12:50:42,266 [Thread-9] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting iCacheCas.localTeardown() ------------------------------------
FATAL 2014-09-11 12:50:42,266 [Thread-9] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.localTeardown() ---------------------------
```
While Stabilizer reported
```
INFO 12:48:24 iCacheCas iCacheCas Starting Test initialization
INFO 12:48:25 iCacheCas Completed Test initialization
INFO 12:48:25 iCacheCas Starting Test setup
INFO 12:48:27 iCacheCas Completed Test setup
INFO 12:48:28 iCacheCas Starting Test local warmup
INFO 12:48:30 iCacheCas Completed Test local warmup
INFO 12:48:30 iCacheCas Starting Test global warmup
INFO 12:48:32 iCacheCas Completed Test global warmup
INFO 12:48:32 iCacheCas Starting Test start
INFO 12:48:33 iCacheCas Completed Test start
INFO 12:48:33 iCacheCas Test will run for 00d 00h 02m 00s
INFO 12:49:03 iCacheCas Running 00d 00h 00m 30s, 25.00 percent complete
INFO 12:49:33 iCacheCas Running 00d 00h 01m 00s, 50.00 percent complete
INFO 12:50:03 iCacheCas Running 00d 00h 01m 30s, 75.00 percent complete
INFO 12:50:33 iCacheCas Running 00d 00h 02m 00s, 100.00 percent complete
INFO 12:50:33 iCacheCas Test finished running
INFO 12:50:33 iCacheCas Starting Test stop
INFO 12:50:34 iCacheCas Completed Test stop
INFO 12:50:34 iCacheCas Starting Test global verify
INFO 12:50:36 iCacheCas Completed Test global verify
INFO 12:50:36 iCacheCas Starting Test local verify
INFO 12:50:38 iCacheCas Completed Test local verify
INFO 12:50:38 iCacheCas Starting Test global tear down
INFO 12:50:40 iCacheCas Finished Test global tear down
INFO 12:50:40 iCacheCas Starting Test local tear down
INFO 12:50:42 iCacheCas Completed Test local tear down
INFO 12:50:42 Terminating workers
INFO 12:50:42 All workers have been terminated
INFO 12:50:42 Starting cool down (10 sec)
INFO 12:50:52 Finished cool down
INFO 12:50:52 Total running time: 147 seconds
INFO 12:50:52 -----------------------------------------------------------------------------
INFO 12:50:52 No failures have been detected!
INFO 12:50:52 -----------------------------------------------------------------------------
```
details of setup error
```
due to a simple error in my test setup method, I found this error in worker.log
```
FATAL 2014-09-11 12:48:26,081 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.setup() ---------------------------
FATAL 2014-09-11 12:48:26,082 [Thread-2] com.hazelcast.stabilizer.tests.utils.ExceptionReporter: Exception detected
java.lang.NullPointerException: Retrieving a list instance with a null name is not allowed!
at com.hazelcast.instance.HazelcastInstanceImpl.getList(HazelcastInstanceImpl.java:208)
at com.hazelcast.instance.HazelcastInstanceProxy.getList(HazelcastInstanceProxy.java:91)
at com.hazelcast.stabilizer.tests.icache.CasICacheTest.setup(CasICacheTest.java:66)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.hazelcast.stabilizer.worker.TestContainer.invoke(TestContainer.java:138)
at com.hazelcast.stabilizer.worker.TestContainer.setup(TestContainer.java:101)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.hazelcast.stabilizer.worker.MemberWorker$TestCommandRequestProcessingThread$2.doRun(MemberWorker.java:405)
at com.hazelcast.stabilizer.worker.MemberWorker$CommandThread.run(MemberWorker.java:493)
```
And due to a simple error in localTeardown
```
FATAL 2014-09-11 12:50:41,327 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed iCacheCas.localTeardown() ---------------------------
FATAL 2014-09-11 12:50:41,327 [Thread-6] com.hazelcast.stabilizer.tests.utils.ExceptionReporter: Exception detected
java.lang.NullPointerException
at com.hazelcast.stabilizer.tests.icache.CasICacheTest.teardown(CasICacheTest.java:82)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.hazelcast.stabilizer.worker.TestContainer.invoke(TestContainer.java:138)
at com.hazelcast.stabilizer.worker.TestContainer.localTeardown(TestContainer.java:109)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.hazelcast.stabilizer.worker.MemberWorker$TestCommandRequestProcessingThread$2.doRun(MemberWorker.java:405)
at com.hazelcast.stabilizer.worker.MemberWorker$CommandThread.run(MemberWorker.java:493)
```
```
| 102611f184ba8e097e82e8695398e3afcf551f57 | 789659dfe2e6bdf65e9ac98a4351511e7b083852 | https://github.com/hazelcast/hazelcast-simulator/compare/102611f184ba8e097e82e8695398e3afcf551f57...789659dfe2e6bdf65e9ac98a4351511e7b083852 | diff --git a/stabilizer/src/main/java/com/hazelcast/stabilizer/tests/utils/ExceptionReporter.java b/stabilizer/src/main/java/com/hazelcast/stabilizer/tests/utils/ExceptionReporter.java
index 1f92e2fa1..f8efb749b 100644
--- a/stabilizer/src/main/java/com/hazelcast/stabilizer/tests/utils/ExceptionReporter.java
+++ b/stabilizer/src/main/java/com/hazelcast/stabilizer/tests/utils/ExceptionReporter.java
@@ -15,7 +15,7 @@ import static com.hazelcast.stabilizer.Utils.writeText;
*/
public class ExceptionReporter {
- private final static AtomicLong FAILURE_ID = new AtomicLong(1);
+ private final static AtomicLong FAILURE_ID = new AtomicLong(0);
private final static ILogger log = Logger.getLogger(ExceptionReporter.class);
/**
@@ -31,16 +31,27 @@ public class ExceptionReporter {
return;
}
- log.severe("Exception detected", cause);
+ long exceptionCount = FAILURE_ID.incrementAndGet();
+ log.severe("Exception #" + exceptionCount + " detected", cause);
- final File tmpFile = createTmpFile();
- if (tmpFile == null) {
+ String targetFileName = exceptionCount + ".exception";
+
+ final File tmpFile = new File(targetFileName + ".tmp");
+
+ try {
+ if (!tmpFile.createNewFile()) {
+ // can't happen since id's are always incrementing. So just for safety reason this is added.
+ throw new IOException("Could not create tmp file:" + tmpFile.getAbsolutePath()+" file already exists.");
+ }
+ } catch (IOException e) {
+ log.severe("Could not report exception; this means that this exception is not visible to the coordinator", e);
return;
}
writeCauseToFile(testId, cause, tmpFile);
- final File file = new File(FAILURE_ID.incrementAndGet() + ".exception");
+ final File file = new File(targetFileName);
+
if (!tmpFile.renameTo(file)) {
log.severe("Failed to rename tmp file:" + tmpFile + " to " + file);
}
diff --git a/stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java b/stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java
index 6b0c3f86b..4c62ac522 100644
--- a/stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java
+++ b/stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java
@@ -346,7 +346,7 @@ public class MemberWorker {
return;
}
- new CommandThread(command, testId) {
+ CommandThread commandThread = new CommandThread(command, testId) {
@Override
public void doRun() throws Throwable {
boolean passive = command.clientOnly && clientInstance == null;
@@ -365,15 +365,16 @@ public class MemberWorker {
log.info(format("--------------------------- Completed %s.run() " +
"------------------------------------",
testName));
- }catch(Throwable t){
+ } catch (Throwable t) {
String msg = format("--------------------------- Failed to execute %s.run() " +
"------------------------------------",
testName);
- log.severe(msg,t);
+ log.severe(msg, t);
}
}
}
- }.start();
+ };
+ commandThread.start();
} catch (Exception e) {
log.severe("Failed to start test", e);
throw e;
@@ -395,7 +396,7 @@ public class MemberWorker {
}
final Method method = test.getClass().getMethod(methodName);
- new CommandThread(command, command.testId) {
+ CommandThread commandThread = new CommandThread(command, command.testId) {
@Override
public void doRun() throws Throwable {
log.info(format("--------------------------- Starting %s.%s() ------------------------------------",
@@ -415,7 +416,8 @@ public class MemberWorker {
}
}
}
- }.start();
+ };
+ commandThread.start();
} catch (Exception e) {
log.severe(format("Failed to execute test.%s()", methodName), e);
throw e;
@@ -492,7 +494,7 @@ public class MemberWorker {
commands.put(testId, command);
doRun();
} catch (Throwable t) {
- ExceptionReporter.report(null, t);
+ ExceptionReporter.report(testId, t);
} finally {
commands.remove(testId);
} | ['stabilizer/src/main/java/com/hazelcast/stabilizer/tests/utils/ExceptionReporter.java', 'stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 294,582 | 58,054 | 8,320 | 78 | 781 | 120 | 16 | 1 | 14,771 | 889 | 3,786 | 161 | 0 | 5 | 1970-01-01T00:23:30 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
342 | hazelcast/hazelcast-simulator/1325/1324 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/1324 | https://github.com/hazelcast/hazelcast-simulator/pull/1325 | https://github.com/hazelcast/hazelcast-simulator/pull/1325 | 1 | fix | SIMULATOR_USER rename warning | If the old 'USER' is found in Simulator properties, a big fat error should be given | 4c9b5b845a6d75f002b25e6127bf986ff4f8fc71 | 6e7f9dc259f0925cef9a99c36f09e2dc95df08d3 | https://github.com/hazelcast/hazelcast-simulator/compare/4c9b5b845a6d75f002b25e6127bf986ff4f8fc71...6e7f9dc259f0925cef9a99c36f09e2dc95df08d3 | diff --git a/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java b/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java
index 902a72d0e..7e4331681 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java
@@ -119,6 +119,7 @@ public class SimulatorProperties {
for (Map.Entry entry : p.entrySet()) {
String key = (String) entry.getKey();
+
String value = (String) entry.getValue();
properties.put(key, new Value(isDefault, value));
}
@@ -126,6 +127,10 @@ public class SimulatorProperties {
if (containsKey("HAZELCAST_VERSION_SPEC")) {
throw new IOException("'HAZELCAST_VERSION_SPEC' property is deprecated, Use 'VERSION_SPEC' instead.");
}
+
+ if (containsKey("USER")) {
+ throw new IOException("'USER' property is deprecated, Use 'SIMULATOR_USER' instead.");
+ }
} catch (IOException e) {
throw rethrow(e);
} finally {
diff --git a/simulator/src/test/java/com/hazelcast/simulator/common/SimulatorPropertiesTest.java b/simulator/src/test/java/com/hazelcast/simulator/common/SimulatorPropertiesTest.java
index d7bc54506..efdf35de0 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/common/SimulatorPropertiesTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/common/SimulatorPropertiesTest.java
@@ -6,6 +6,7 @@ import org.junit.Before;
import org.junit.Test;
import java.io.File;
+import java.io.IOException;
import static com.hazelcast.simulator.TestEnvironmentUtils.setupFakeEnvironment;
import static com.hazelcast.simulator.TestEnvironmentUtils.tearDownFakeEnvironment;
@@ -238,6 +239,14 @@ public class SimulatorPropertiesTest {
assertEquals("testIdentity", simulatorProperties.getCloudCredential());
}
+ @Test(expected = CommandLineExitException.class)
+ public void test_USER() {
+ File workingDirProperties = new File(simulatorHome, "simulator.properties");
+ appendText("USER=foobar", workingDirProperties);
+
+ simulatorProperties.load(workingDirProperties);
+ }
+
private void initProperty(File file, String key, String value) {
System.out.println("writing value: " + value);
appendText(format("%s=%s", key, value), file); | ['simulator/src/test/java/com/hazelcast/simulator/common/SimulatorPropertiesTest.java', 'simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,025,220 | 205,376 | 26,884 | 227 | 162 | 31 | 5 | 1 | 83 | 16 | 19 | 1 | 0 | 0 | 1970-01-01T00:24:48 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
341 | hazelcast/hazelcast-simulator/1366/1248 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/1248 | https://github.com/hazelcast/hazelcast-simulator/pull/1366 | https://github.com/hazelcast/hazelcast-simulator/pull/1366 | 1 | fixed | Problem starting members, doesn't lead to failure | When there is a problem starting members, this isn't translated to a nice and useful message
```
INFO 11:25:49 =========================================================================
INFO 11:25:49 Starting 2 Workers (2 members, 0 clients)...
FATAL 11:25:50 Could not create 1 member Worker on C_A1 (EXCEPTION_DURING_OPERATION_EXECUTION)
FATAL 11:25:50 [C_A2] Failed to start Worker: Failed to start Worker
FATAL 11:25:51 [C_A1] Failed to start Worker: Failed to start Worker
INFO 11:25:51 Terminating 0 Workers...
INFO 11:25:52 Waiting up to 120 seconds for shutdown of 0 Workers...
INFO 11:25:52 Finished shutdown of all Workers (0 seconds)
INFO 11:25:52 Shutdown of com.hazelcast.simulator.protocol.connector.CoordinatorConnector...
INFO 11:25:52 ServerConnectorMessageQueueThread received POISON_PILL and will stop...
INFO 11:25:52 HarakiriMonitor is enabled and will kill inactive EC2 instances after 7200 seconds
INFO 11:25:52 Stopping 2 Agents...
INFO 11:25:52 Stopping Agent 54.158.82.62
INFO 11:25:52 Stopping Agent 54.205.232.99
INFO 11:25:54 Starting HarakiriMonitor on 54.205.232.99
INFO 11:25:54 Starting HarakiriMonitor on 54.158.82.62
INFO 11:25:56 Successfully stopped 2 Agents
INFO 11:25:56 Download artifacts of 2 machines...
INFO 11:25:56 Downloading Worker logs from 54.205.232.99
INFO 11:25:56 Downloading Worker logs from 54.158.82.62
INFO 11:25:56 Downloading Agent logs from 54.158.82.62
INFO 11:25:56 Downloading Agent logs from 54.205.232.99
INFO 11:26:04 Finished downloading artifacts of 2 machines (7 seconds)
INFO 11:26:04 Executing after-completion script: /home/alarmnummer/hazelcast-simulator-0.9-RC1-SNAPSHOT/conf/after-completion.sh
INFO 11:26:04 Finished after-completion script
INFO 11:26:04 =========================================================================
INFO 11:26:04 No failures have been detected!
INFO 11:26:04 =========================================================================
```
| c7af9b0b66fb0f19692b00e9c4d1d79c4f2ba165 | 5c913ce771a28a5d33056be57e5fbae3649aee77 | https://github.com/hazelcast/hazelcast-simulator/compare/c7af9b0b66fb0f19692b00e9c4d1d79c4f2ba165...5c913ce771a28a5d33056be57e5fbae3649aee77 | diff --git a/simulator/src/main/java/com/hazelcast/simulator/common/FailureType.java b/simulator/src/main/java/com/hazelcast/simulator/common/FailureType.java
index e3fb5f524..352c7024d 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/common/FailureType.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/common/FailureType.java
@@ -27,7 +27,8 @@ public enum FailureType {
WORKER_TIMEOUT("workerTimeout", "Worker timeout", false),
WORKER_OOME("workerOOME", "Worker OOME", true),
WORKER_ABNORMAL_EXIT("workerAbnormalExit", "Worker abnormal exit", true),
- WORKER_NORMAL_EXIT("workerNormalExit", "Worker normal exit", true);
+ WORKER_NORMAL_EXIT("workerNormalExit", "Worker normal exit", true),
+ WORKER_CREATE_ERROR("workerCreateError", "Worker create error", true);
private final String id;
private final String humanReadable;
diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/CoordinatorRunMonolith.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/CoordinatorRunMonolith.java
index 04a48723d..878b56c48 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/CoordinatorRunMonolith.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/CoordinatorRunMonolith.java
@@ -16,7 +16,9 @@
package com.hazelcast.simulator.coordinator;
+import com.hazelcast.simulator.common.FailureType;
import com.hazelcast.simulator.common.TestPhase;
+import com.hazelcast.simulator.protocol.operation.FailureOperation;
import com.hazelcast.simulator.utils.CommandLineExitException;
import org.apache.log4j.Logger;
@@ -48,7 +50,13 @@ class CoordinatorRunMonolith {
public void init(DeploymentPlan deploymentPlan) {
logConfiguration(deploymentPlan);
- coordinator.createStartWorkersTask(deploymentPlan.getWorkerDeployment(), new HashMap<String, String>()).run();
+ try {
+ coordinator.createStartWorkersTask(deploymentPlan.getWorkerDeployment(), new HashMap<String, String>()).run();
+ } catch (RuntimeException e) {
+ failureCollector.notify(
+ new FailureOperation("Failed to create worker", FailureType.WORKER_CREATE_ERROR, null, null, null));
+ throw e;
+ }
}
public boolean run(TestSuite testSuite) throws Exception {
diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java
index 83dacc0ca..8218a0548 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java
@@ -15,6 +15,7 @@
*/
package com.hazelcast.simulator.coordinator;
+import com.hazelcast.simulator.common.FailureType;
import com.hazelcast.simulator.protocol.operation.FailureOperation;
import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.protocol.registry.TestData;
@@ -62,21 +63,24 @@ public class FailureCollector {
public void notify(FailureOperation failure) {
failure = enrich(failure);
- WorkerData worker = componentRegistry.findWorker(failure.getWorkerAddress());
- if (worker == null) {
- // we are not interested in failures of workers that aren't registered any longer.
- return;
- }
+ if (failure.getType() != FailureType.WORKER_CREATE_ERROR) {
+ WorkerData worker = componentRegistry.findWorker(failure.getWorkerAddress());
+ if (worker == null) {
+ // we are not interested in failures of workers that aren't registered any longer.
+ return;
+ }
- // it the failure is the terminal for that workers, we need to remove it from the component registry
- if (failure.getType().isTerminal()) {
- LOGGER.info("Removing worker " + worker.getAddress() + " from componentRegistry due to [" + failure.getType() + "]");
- componentRegistry.removeWorker(worker.getAddress());
- }
+ // it the failure is the terminal for that workers, we need to remove it from the component registry
+ if (failure.getType().isTerminal()) {
+ LOGGER.info("Removing worker " + worker.getAddress()
+ + " from componentRegistry due to [" + failure.getType() + "]");
+ componentRegistry.removeWorker(worker.getAddress());
+ }
- // if we don't care for the failure, we are done; no need to log anything.
- if (worker.isIgnoreFailures() || failure.getType() == WORKER_NORMAL_EXIT) {
- return;
+ // if we don't care for the failure, we are done; no need to log anything.
+ if (worker.isIgnoreFailures() || failure.getType() == WORKER_NORMAL_EXIT) {
+ return;
+ }
}
int failureCount = criticalFailureCounter.incrementAndGet();
@@ -107,6 +111,10 @@ public class FailureCollector {
}
private void logFailure(FailureOperation failure, long failureCount, boolean isCriticalFailure) {
+ if (failure.getType() == FailureType.WORKER_CREATE_ERROR) {
+ return;
+ }
+
int failureNumber = failureNumberGenerator.incrementAndGet();
if (failureCount < MAX_CONSOLE_FAILURE_COUNT) {
if (isCriticalFailure) {
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java
index cc961b729..57f54a003 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java
@@ -42,14 +42,14 @@ public class FailureOperation implements SimulatorOperation {
public FailureOperation(String message, FailureType type, SimulatorAddress workerAddress, String agentAddress,
Throwable cause) {
- this(message, type, workerAddress, agentAddress, null, null, null, throwableToString(cause));
+ this(message, type, workerAddress, agentAddress, null, null, null, cause == null ? "" : throwableToString(cause));
}
public FailureOperation(String message, FailureType type, SimulatorAddress workerAddress, String agentAddress,
String hzAddress, String workerId, String testId, String cause) {
this.message = message;
this.type = type.name();
- this.workerAddress = (workerAddress == null) ? null : workerAddress.toString();
+ this.workerAddress = workerAddress == null ? null : workerAddress.toString();
this.agentAddress = agentAddress;
this.hzAddress = hzAddress;
this.workerId = workerId; | ['simulator/src/main/java/com/hazelcast/simulator/common/FailureType.java', 'simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java', 'simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java', 'simulator/src/main/java/com/hazelcast/simulator/coordinator/CoordinatorRunMonolith.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 1,032,470 | 206,930 | 27,075 | 228 | 3,152 | 615 | 51 | 4 | 1,968 | 218 | 577 | 33 | 0 | 1 | 1970-01-01T00:24:48 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
340 | hazelcast/hazelcast-simulator/1424/1423 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/1423 | https://github.com/hazelcast/hazelcast-simulator/pull/1424 | https://github.com/hazelcast/hazelcast-simulator/pull/1424 | 1 | fix | Bug in harakiri | The problem is in the startup of the harakiri monitor after a test completes. Instead of setting the actual key/value, the literal values are taken. So if you have your identity configured as '''bla/ec2.identity''', then '''bla/ec2.identity''' is the literal value.
The cause of this is the SimulatorProperties.asMap function which returns the values without replacements being applied. | d8d4ebf4f4b0b67e99d1cf289868b0daa41dee83 | db6a948f879dfacba235f8a67774256782100bb8 | https://github.com/hazelcast/hazelcast-simulator/compare/d8d4ebf4f4b0b67e99d1cf289868b0daa41dee83...db6a948f879dfacba235f8a67774256782100bb8 | diff --git a/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java b/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java
index d4638aefd..5ef21b522 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java
@@ -26,6 +26,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
+import static com.hazelcast.simulator.utils.CloudProviderUtils.isTrueCloud;
import static com.hazelcast.simulator.utils.CommonUtils.closeQuietly;
import static com.hazelcast.simulator.utils.CommonUtils.rethrow;
import static com.hazelcast.simulator.utils.FileUtils.fileAsText;
@@ -74,6 +75,17 @@ public class SimulatorProperties {
String key = (String) entry.getKey();
map.put(key, get(key));
}
+
+ String cloudProvider = getCloudProvider();
+ if (!isTrueCloud(cloudProvider)) {
+ // if isn't a true cloud, we don't for the identity/credentials.
+ return map;
+ }
+
+ // it is a true cloud; so we care for the identity/credentials. They need to be configured correctly or
+ // else an exception is thrown.
+ map.put(CLOUD_IDENTITY, getCloudIdentity());
+ map.put(CLOUD_CREDENTIAL, getCloudCredential());
return map;
}
diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/AgentUtils.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/AgentUtils.java
index 08376fde9..5be4840ac 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/AgentUtils.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/AgentUtils.java
@@ -28,9 +28,9 @@ public final class AgentUtils {
}
public static void startAgents(SimulatorProperties properties, ComponentRegistry registry) {
- String startscript = getConfigurationFile("agent_start.sh").getAbsolutePath();
+ String startScript = getConfigurationFile("agent_start.sh").getAbsolutePath();
- new BashCommand(startscript)
+ new BashCommand(startScript)
.addParams(join(registry.getAgentIps(), ","))
.addEnvironment(properties.asMap())
.execute();
diff --git a/simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java b/simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java
index 471eca063..629b0f415 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java
@@ -47,6 +47,12 @@ public final class CloudProviderUtils {
}
}
+ public static boolean isTrueCloud(String cloudProvider) {
+ return !cloudProvider.equals("embedded")
+ && !cloudProvider.equals("local")
+ && !cloudProvider.equals("static");
+ }
+
public static boolean isStatic(SimulatorProperties properties) {
return isStatic(properties.getCloudProvider());
} | ['simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java', 'simulator/src/main/java/com/hazelcast/simulator/coordinator/AgentUtils.java', 'simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 841,823 | 168,898 | 22,266 | 170 | 1,032 | 203 | 22 | 3 | 389 | 56 | 78 | 3 | 0 | 0 | 1970-01-01T00:24:49 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
344 | hazelcast/hazelcast-simulator/1168/1130 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/1130 | https://github.com/hazelcast/hazelcast-simulator/pull/1168 | https://github.com/hazelcast/hazelcast-simulator/pull/1168 | 1 | fixes | Parallel test suites | Currently test suites can't be run in parallel, because in the Agent only 1 testsuite can be active.
| 9b79fa3cfdefab472e5e1c0261150e7e8a51d75d | 2d5d77202337d6bc369d1f45f5a4fc91d3447f24 | https://github.com/hazelcast/hazelcast-simulator/compare/9b79fa3cfdefab472e5e1c0261150e7e8a51d75d...2d5d77202337d6bc369d1f45f5a4fc91d3447f24 | diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java b/simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java
index 03c687d95..a87eec8d6 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java
@@ -18,7 +18,6 @@ package com.hazelcast.simulator.agent;
import com.hazelcast.simulator.agent.workerprocess.WorkerProcessFailureMonitor;
import com.hazelcast.simulator.agent.workerprocess.WorkerProcessManager;
import com.hazelcast.simulator.common.ShutdownThread;
-import com.hazelcast.simulator.common.TestSuite;
import com.hazelcast.simulator.protocol.connector.AgentConnector;
import com.hazelcast.simulator.protocol.operation.OperationTypeCounter;
import org.apache.log4j.Logger;
@@ -43,7 +42,7 @@ public class Agent {
private static final Logger LOGGER = Logger.getLogger(Agent.class);
private static final AtomicBoolean SHUTDOWN_STARTED = new AtomicBoolean();
- private final WorkerProcessManager workerProcessManager = new WorkerProcessManager();
+ private final WorkerProcessManager workerProcessManager = new WorkerProcessManager();
private final int addressIndex;
private final String publicAddress;
@@ -58,7 +57,6 @@ public class Agent {
private final FailureHandlerImpl failureSender;
private final WorkerProcessFailureMonitor workerProcessFailureMonitor;
- private volatile TestSuite testSuite;
private volatile String sessionId;
public Agent(int addressIndex, String publicAddress, int port, String cloudProvider, String cloudIdentity,
@@ -86,7 +84,7 @@ public class Agent {
}
private void createPidFile() {
- deleteQuiet(pidFile);
+ deleteQuiet(pidFile);
writeText("" + getPID(), pidFile);
}
@@ -118,15 +116,6 @@ public class Agent {
this.sessionId = sessionId;
}
- public void setTestSuite(TestSuite testSuite) {
- this.testSuite = testSuite;
- failureSender.setTestSuite(testSuite);
- }
-
- public TestSuite getTestSuite() {
- return testSuite;
- }
-
public File getSessionDirectory() {
String sessionId = this.sessionId;
if (sessionId == null) {
diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/FailureHandlerImpl.java b/simulator/src/main/java/com/hazelcast/simulator/agent/FailureHandlerImpl.java
index e9b341ab7..305a5b669 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/FailureHandlerImpl.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/FailureHandlerImpl.java
@@ -18,7 +18,6 @@ package com.hazelcast.simulator.agent;
import com.hazelcast.simulator.agent.workerprocess.FailureHandler;
import com.hazelcast.simulator.agent.workerprocess.WorkerProcess;
import com.hazelcast.simulator.common.FailureType;
-import com.hazelcast.simulator.common.TestSuite;
import com.hazelcast.simulator.protocol.connector.AgentConnector;
import com.hazelcast.simulator.protocol.core.Response;
import com.hazelcast.simulator.protocol.core.ResponseFuture;
@@ -38,8 +37,6 @@ class FailureHandlerImpl implements FailureHandler {
private final String agentAddress;
private final AgentConnector agentConnector;
- private volatile TestSuite testSuite;
-
private int failureCount;
FailureHandlerImpl(String agentAddress, AgentConnector agentConnector) {
@@ -47,10 +44,6 @@ class FailureHandlerImpl implements FailureHandler {
this.agentConnector = agentConnector;
}
- public void setTestSuite(TestSuite testSuite) {
- this.testSuite = testSuite;
- }
-
@Override
public boolean handle(String message, FailureType type, WorkerProcess workerProcess, String testId, String cause) {
boolean send = true;
@@ -73,7 +66,7 @@ class FailureHandlerImpl implements FailureHandler {
String workerId = workerProcess.getId();
FailureOperation operation = new FailureOperation(message, type, workerAddress, agentAddress,
- workerProcess.getHazelcastAddress(), workerId, testId, testSuite, cause);
+ workerProcess.getHazelcastAddress(), workerId, testId, cause);
if (type.isPoisonPill()) {
LOGGER.info(format("Worker %s (%s) finished.", workerId, workerAddress));
diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java
index 75c40ca3a..11f8cf975 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java
@@ -98,7 +98,7 @@ public final class Coordinator {
this.outputDirectory = ensureNewDirectory(new File(getUserDir(), coordinatorParameters.getSessionId()));
this.componentRegistry = componentRegistry;
this.coordinatorParameters = coordinatorParameters;
- this.failureCollector = new FailureCollector(outputDirectory);
+ this.failureCollector = new FailureCollector(outputDirectory, componentRegistry);
this.failureCollector.addListener(true, new ComponentRegistryFailureListener(componentRegistry));
this.simulatorProperties = coordinatorParameters.getSimulatorProperties();
this.bash = new Bash(simulatorProperties);
diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java
index 990daedba..472fbcfbc 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java
@@ -17,6 +17,8 @@ package com.hazelcast.simulator.coordinator;
import com.hazelcast.simulator.common.FailureType;
import com.hazelcast.simulator.protocol.operation.FailureOperation;
+import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
+import com.hazelcast.simulator.protocol.registry.TestData;
import com.hazelcast.simulator.utils.CommandLineExitException;
import org.apache.log4j.Logger;
@@ -47,9 +49,11 @@ public class FailureCollector {
private final ConcurrentMap<String, Boolean> hasCriticalFailuresMap = new ConcurrentHashMap<String, Boolean>();
private final File file;
+ private final ComponentRegistry componentRegistry;
- public FailureCollector(File outputDirectory) {
+ public FailureCollector(File outputDirectory, ComponentRegistry componentRegistry) {
this.file = new File(outputDirectory, "failures.txt");
+ this.componentRegistry = componentRegistry;
}
public void addListener(FailureListener listener) {
@@ -61,6 +65,8 @@ public class FailureCollector {
}
public void notify(FailureOperation failure) {
+ failure = enrichWithTestSuite(failure);
+
boolean isFinishedFailure = false;
FailureType failureType = failure.getType();
@@ -92,6 +98,21 @@ public class FailureCollector {
}
}
+ private FailureOperation enrichWithTestSuite(FailureOperation failure) {
+ String testId = failure.getTestId();
+ if (testId == null) {
+ return failure;
+ }
+
+ TestData testData = componentRegistry.getTest(testId);
+ if (testData == null) {
+ return failure;
+ }
+
+ failure.setTestSuite(testData.getTestSuite());
+ return failure;
+ }
+
private void logFailure(FailureOperation failure, long failureCount, boolean isCriticalFailure) {
int failureNumber = failureNumberGenerator.incrementAndGet();
if (failureCount < MAX_CONSOLE_FAILURE_COUNT) {
diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java
index d706c3d7b..f525396eb 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java
@@ -19,7 +19,6 @@ package com.hazelcast.simulator.coordinator;
import com.hazelcast.simulator.common.TestCase;
import com.hazelcast.simulator.common.TestPhase;
import com.hazelcast.simulator.common.TestSuite;
-import com.hazelcast.simulator.protocol.operation.InitTestSuiteOperation;
import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.protocol.registry.TargetType;
import com.hazelcast.simulator.protocol.registry.TestData;
@@ -67,19 +66,17 @@ public class RunTestSuiteTask {
}
public void run() {
- componentRegistry.addTests(testSuite);
+ List<TestData> tests = componentRegistry.addTests(testSuite);
try {
- run0();
+ run0(tests);
} finally {
testPhaseListeners.removeAllListeners(runners);
- componentRegistry.removeTests();
+ componentRegistry.removeTests(testSuite);
performanceStatsCollector.logDetailedPerformanceInfo(testSuite.getDurationSeconds());
}
}
- private void run0() {
- remoteClient.sendToAllAgents(new InitTestSuiteOperation(testSuite));
-
+ private void run0(List<TestData> tests) {
int testCount = testSuite.size();
boolean parallel = testSuite.isParallel() && testCount > 1;
Map<TestPhase, CountDownLatch> testPhaseSyncMap = getTestPhaseSyncMap(testCount, parallel,
@@ -88,7 +85,7 @@ public class RunTestSuiteTask {
echoer.echo("Starting TestSuite");
echoTestSuiteDuration(parallel);
- for (TestData testData : componentRegistry.getTests()) {
+ for (TestData testData: tests) {
int testIndex = testData.getTestIndex();
TestCase testCase = testData.getTestCase();
echoer.echo("Configuration for %s (T%d):%n%s", testCase.getId(), testIndex, testCase);
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java
index adaf698e9..0aaf48a6c 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java
@@ -37,16 +37,16 @@ public class FailureOperation implements SimulatorOperation {
private final String hzAddress;
private final String workerId;
private final String testId;
- private final TestSuite testSuite;
+ private TestSuite testSuite;
private final String cause;
public FailureOperation(String message, FailureType type, SimulatorAddress workerAddress, String agentAddress,
Throwable cause) {
- this(message, type, workerAddress, agentAddress, null, null, null, null, throwableToString(cause));
+ this(message, type, workerAddress, agentAddress, null, null, null, throwableToString(cause));
}
public FailureOperation(String message, FailureType type, SimulatorAddress workerAddress, String agentAddress,
- String hzAddress, String workerId, String testId, TestSuite testSuite, String cause) {
+ String hzAddress, String workerId, String testId, String cause) {
this.message = message;
this.type = type.name();
this.workerAddress = (workerAddress == null) ? null : workerAddress.toString();
@@ -54,10 +54,14 @@ public class FailureOperation implements SimulatorOperation {
this.hzAddress = hzAddress;
this.workerId = workerId;
this.testId = testId;
- this.testSuite = testSuite;
this.cause = cause;
}
+ public FailureOperation setTestSuite(TestSuite testSuite) {
+ this.testSuite = testSuite;
+ return this;
+ }
+
public FailureType getType() {
return FailureType.valueOf(type);
}
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/InitTestSuiteOperation.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/InitTestSuiteOperation.java
deleted file mode 100644
index 956de1aa0..000000000
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/InitTestSuiteOperation.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.hazelcast.simulator.protocol.operation;
-
-import com.hazelcast.simulator.common.TestSuite;
-
-public class InitTestSuiteOperation implements SimulatorOperation {
-
- private final TestSuite testSuite;
-
- public InitTestSuiteOperation(TestSuite testSuite) {
- this.testSuite = testSuite;
- }
-
- public TestSuite getTestSuite() {
- return testSuite;
- }
-}
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/OperationType.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/OperationType.java
index e475bbb44..057226a28 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/OperationType.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/protocol/operation/OperationType.java
@@ -47,12 +47,11 @@ public enum OperationType {
// AgentOperationProcessor
INIT_SESSION(InitSessionOperation.class, 3000),
- INIT_TEST_SUITE(InitTestSuiteOperation.class, 3001),
- CREATE_WORKER(CreateWorkerOperation.class, 3002),
- START_TIMEOUT_DETECTION(StartTimeoutDetectionOperation.class, 3003),
- STOP_TIMEOUT_DETECTION(StopTimeoutDetectionOperation.class, 3004),
- IGNORE_WORKER_FAILURE(IgnoreWorkerFailureOperation.class, 3005),
- BASH(BashOperation.class, 3006),
+ CREATE_WORKER(CreateWorkerOperation.class, 3001),
+ START_TIMEOUT_DETECTION(StartTimeoutDetectionOperation.class, 3002),
+ STOP_TIMEOUT_DETECTION(StopTimeoutDetectionOperation.class, 3003),
+ IGNORE_WORKER_FAILURE(IgnoreWorkerFailureOperation.class, 3004),
+ BASH(BashOperation.class, 3005),
// WorkerOperationProcessor
PING(PingOperation.class, 4000),
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessor.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessor.java
index a438d0179..1eea8cc6d 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessor.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessor.java
@@ -29,7 +29,6 @@ import com.hazelcast.simulator.protocol.operation.BashOperation;
import com.hazelcast.simulator.protocol.operation.CreateWorkerOperation;
import com.hazelcast.simulator.protocol.operation.IgnoreWorkerFailureOperation;
import com.hazelcast.simulator.protocol.operation.InitSessionOperation;
-import com.hazelcast.simulator.protocol.operation.InitTestSuiteOperation;
import com.hazelcast.simulator.protocol.operation.IntegrationTestOperation;
import com.hazelcast.simulator.protocol.operation.LogOperation;
import com.hazelcast.simulator.protocol.operation.OperationType;
@@ -83,9 +82,6 @@ public class AgentOperationProcessor extends AbstractOperationProcessor {
case INIT_SESSION:
agent.setSessionId(((InitSessionOperation) operation).getSessionId());
break;
- case INIT_TEST_SUITE:
- agent.setTestSuite(((InitTestSuiteOperation) operation).getTestSuite());
- break;
case CREATE_WORKER:
return processCreateWorker((CreateWorkerOperation) operation);
case START_TIMEOUT_DETECTION:
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/registry/ComponentRegistry.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/registry/ComponentRegistry.java
index f198824a9..15223ea32 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/registry/ComponentRegistry.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/protocol/registry/ComponentRegistry.java
@@ -29,6 +29,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
@@ -243,8 +244,7 @@ public class ComponentRegistry {
agent.getAddress(),
formatLong(agentMemberWorkerCount, 2),
formatLong(agentClientWorkerCount, 2),
- agentVersionSpecs
- ));
+ agentVersionSpecs));
for (WorkerData worker : agent.getWorkers()) {
LOGGER.info(" Worker " + worker.getAddress() + " " + worker.getSettings().getWorkerType()
@@ -268,17 +268,32 @@ public class ComponentRegistry {
}
}
- public synchronized void addTests(TestSuite testSuite) {
+ public synchronized List<TestData> addTests(TestSuite testSuite) {
for (TestCase testCase : testSuite.getTestCaseList()) {
- int addressIndex = testIndex.incrementAndGet();
- SimulatorAddress testAddress = new SimulatorAddress(AddressLevel.TEST, 0, 0, addressIndex);
- TestData testData = new TestData(addressIndex, testAddress, testCase);
+ if (tests.containsKey(testCase.getId())) {
+ throw new IllegalArgumentException(format("Already a test running with id '%s'", testCase.getId()));
+ }
+ }
+
+ List<TestData> result = new ArrayList<TestData>(testSuite.size());
+ for (TestCase testCase : testSuite.getTestCaseList()) {
+ int testIndex = this.testIndex.incrementAndGet();
+ SimulatorAddress testAddress = new SimulatorAddress(AddressLevel.TEST, 0, 0, testIndex);
+ TestData testData = new TestData(testIndex, testAddress, testCase, testSuite);
+ result.add(testData);
tests.put(testCase.getId(), testData);
}
+ return result;
}
- public void removeTests() {
- tests.clear();
+ public synchronized void removeTests(TestSuite testSuite) {
+ for (Map.Entry<String, TestData> entry : tests.entrySet()) {
+ TestData testData = entry.getValue();
+ if (testData.getTestSuite().equals(testSuite)) {
+ String testId = entry.getKey();
+ tests.remove(testId);
+ }
+ }
}
public int testCount() {
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/registry/TestData.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/registry/TestData.java
index 06d6424e1..be9106f9f 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/registry/TestData.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/protocol/registry/TestData.java
@@ -16,6 +16,7 @@
package com.hazelcast.simulator.protocol.registry;
import com.hazelcast.simulator.common.TestCase;
+import com.hazelcast.simulator.common.TestSuite;
import com.hazelcast.simulator.protocol.core.SimulatorAddress;
public class TestData {
@@ -23,11 +24,13 @@ public class TestData {
private final int testIndex;
private final SimulatorAddress address;
private final TestCase testCase;
+ private final TestSuite testSuite;
- TestData(int testIndex, SimulatorAddress address, TestCase testCase) {
+ TestData(int testIndex, SimulatorAddress address, TestCase testCase, TestSuite testSuite) {
this.testIndex = testIndex;
this.address = address;
this.testCase = testCase;
+ this.testSuite = testSuite;
}
public int getTestIndex() {
@@ -41,4 +44,8 @@ public class TestData {
public TestCase getTestCase() {
return testCase;
}
+
+ public TestSuite getTestSuite() {
+ return testSuite;
+ }
}
diff --git a/simulator/src/test/java/com/hazelcast/simulator/agent/AgentSmokeTest.java b/simulator/src/test/java/com/hazelcast/simulator/agent/AgentSmokeTest.java
index ec3bf6bf4..ec885ae6c 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/agent/AgentSmokeTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/agent/AgentSmokeTest.java
@@ -21,7 +21,6 @@ import com.hazelcast.simulator.protocol.core.SimulatorAddress;
import com.hazelcast.simulator.protocol.operation.CreateTestOperation;
import com.hazelcast.simulator.protocol.operation.FailureOperation;
import com.hazelcast.simulator.protocol.operation.InitSessionOperation;
-import com.hazelcast.simulator.protocol.operation.InitTestSuiteOperation;
import com.hazelcast.simulator.protocol.operation.StartTestOperation;
import com.hazelcast.simulator.protocol.operation.StartTestPhaseOperation;
import com.hazelcast.simulator.protocol.operation.StopTestOperation;
@@ -101,7 +100,7 @@ public class AgentSmokeTest implements FailureListener {
testPhaseListeners = new TestPhaseListeners();
PerformanceStatsCollector performanceStatsCollector = new PerformanceStatsCollector();
outputDirectory = createTmpDirectory();
- failureCollector = new FailureCollector(outputDirectory);
+ failureCollector = new FailureCollector(outputDirectory, componentRegistry);
CoordinatorOperationProcessor processor = new CoordinatorOperationProcessor(
null, failureCollector, testPhaseListeners, performanceStatsCollector);
@@ -186,7 +185,6 @@ public class AgentSmokeTest implements FailureListener {
try {
String testId = testCase.getId();
TestSuite testSuite = new TestSuite();
- remoteClient.sendToAllAgents(new InitTestSuiteOperation(testSuite));
testSuite.addTest(testCase);
componentRegistry.addTests(testSuite);
@@ -224,7 +222,7 @@ public class AgentSmokeTest implements FailureListener {
runPhase(testPhaseListener, testCase, TestPhase.GLOBAL_TEARDOWN);
runPhase(testPhaseListener, testCase, TestPhase.LOCAL_TEARDOWN);
} finally {
- componentRegistry.removeTests();
+ // componentRegistry.removeTests();
LOGGER.info("Terminating workers...");
new TerminateWorkersTask(simulatorProperties, componentRegistry, remoteClient).run();
diff --git a/simulator/src/test/java/com/hazelcast/simulator/agent/FailureHandlerImplTest.java b/simulator/src/test/java/com/hazelcast/simulator/agent/FailureHandlerImplTest.java
index d16d749d5..85332dd7c 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/agent/FailureHandlerImplTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/agent/FailureHandlerImplTest.java
@@ -62,10 +62,7 @@ public class FailureHandlerImplTest {
when(agentConnector.write(any(SimulatorAddress.class), any(SimulatorOperation.class))).thenReturn(response);
when(agentConnector.getFutureMap()).thenReturn(futureMap);
- TestSuite testSuite = new TestSuite();
-
failureSender = new FailureHandlerImpl("127.0.0.1", agentConnector);
- failureSender.setTestSuite(testSuite);
}
@Test
diff --git a/simulator/src/test/java/com/hazelcast/simulator/coordinator/FailureCollectorTest.java b/simulator/src/test/java/com/hazelcast/simulator/coordinator/FailureCollectorTest.java
index 7eddd88d1..26ed1a649 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/coordinator/FailureCollectorTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/coordinator/FailureCollectorTest.java
@@ -3,6 +3,7 @@ package com.hazelcast.simulator.coordinator;
import com.hazelcast.simulator.protocol.core.AddressLevel;
import com.hazelcast.simulator.protocol.core.SimulatorAddress;
import com.hazelcast.simulator.protocol.operation.FailureOperation;
+import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.utils.CommandLineExitException;
import com.hazelcast.simulator.utils.TestUtils;
import org.junit.After;
@@ -26,23 +27,25 @@ public class FailureCollectorTest {
private FailureOperation oomOperation;
private FailureOperation finishedOperation;
private File outputDirectory;
+ private ComponentRegistry componentRegistry;
@Before
public void setUp() {
outputDirectory = TestUtils.createTmpDirectory();
- failureCollector = new FailureCollector(outputDirectory);
+ componentRegistry = new ComponentRegistry();
+ failureCollector = new FailureCollector(outputDirectory, componentRegistry);
SimulatorAddress workerAddress = new SimulatorAddress(AddressLevel.WORKER, 1, 1, 0);
String agentAddress = workerAddress.getParent().toString();
exceptionOperation = new FailureOperation("exception", WORKER_EXCEPTION, workerAddress, agentAddress,
- "127.0.0.1:5701", "workerId", "testId", null, null);
+ "127.0.0.1:5701", "workerId", "testId", null);
oomOperation = new FailureOperation("oom", WORKER_OOM, workerAddress, agentAddress,
- "127.0.0.1:5701", "workerId", "testId", null, null);
+ "127.0.0.1:5701", "workerId", "testId", null);
finishedOperation = new FailureOperation("finished", WORKER_FINISHED, workerAddress, agentAddress,
- "127.0.0.1:5701", "workerId", "testId", null, null);
+ "127.0.0.1:5701", "workerId", "testId", null);
}
@After
diff --git a/simulator/src/test/java/com/hazelcast/simulator/coordinator/RunTestSuiteTaskTest.java b/simulator/src/test/java/com/hazelcast/simulator/coordinator/RunTestSuiteTaskTest.java
index d20181033..3c0412b46 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/coordinator/RunTestSuiteTaskTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/coordinator/RunTestSuiteTaskTest.java
@@ -5,6 +5,7 @@ import com.hazelcast.simulator.common.SimulatorProperties;
import com.hazelcast.simulator.common.TestCase;
import com.hazelcast.simulator.common.TestPhase;
import com.hazelcast.simulator.common.TestSuite;
+import com.hazelcast.simulator.common.WorkerType;
import com.hazelcast.simulator.protocol.connector.CoordinatorConnector;
import com.hazelcast.simulator.protocol.core.Response;
import com.hazelcast.simulator.protocol.core.ResponseType;
@@ -18,7 +19,6 @@ import com.hazelcast.simulator.protocol.operation.StopTestOperation;
import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.protocol.registry.TargetType;
import com.hazelcast.simulator.protocol.registry.TestData;
-import com.hazelcast.simulator.common.WorkerType;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -31,6 +31,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicLong;
import static com.hazelcast.simulator.TestEnvironmentUtils.setupFakeEnvironment;
import static com.hazelcast.simulator.TestEnvironmentUtils.tearDownFakeEnvironment;
@@ -62,6 +63,7 @@ import static org.mockito.Mockito.when;
public class RunTestSuiteTaskTest {
+ private static final AtomicLong ID_GENERATOR = new AtomicLong();
private CountDownLatch finishWorkerLatch = new CountDownLatch(1);
private File outputDirectory;
@@ -90,14 +92,14 @@ public class RunTestSuiteTaskTest {
@Before
public void setUp() {
testSuite = new TestSuite();
- testSuite.addTest(new TestCase("CoordinatorTest1"));
- testSuite.addTest(new TestCase("CoordinatorTest2"));
+ testSuite.addTest(new TestCase("CoordinatorTest" + ID_GENERATOR.incrementAndGet()));
+ testSuite.addTest(new TestCase("CoordinatorTest" + ID_GENERATOR.incrementAndGet()));
outputDirectory = createTmpDirectory();
SimulatorAddress address = new SimulatorAddress(WORKER, 1, 1, 0);
criticalFailureOperation = new FailureOperation("expected critical failure", WORKER_EXCEPTION, address, "127.0.0.1",
- "127.0.0.1:5701", "workerId", "CoordinatorTest1", testSuite, "stacktrace");
+ "127.0.0.1:5701", "workerId", "CoordinatorTest1", "stacktrace");
simulatorProperties = new SimulatorProperties();
@@ -194,7 +196,7 @@ public class RunTestSuiteTaskTest {
@Test
public void runSequential_withSingleTest() {
- TestCase testCase = new TestCase("CoordinatorTest");
+ TestCase testCase = new TestCase("CoordinatorTest" + ID_GENERATOR.incrementAndGet());
testSuite = new TestSuite();
testSuite.addTest(testCase);
@@ -208,7 +210,7 @@ public class RunTestSuiteTaskTest {
@Test
public void runParallel_withSingleTest() {
- TestCase testCase = new TestCase("CoordinatorTest");
+ TestCase testCase = new TestCase("CoordinatorTest" + ID_GENERATOR.incrementAndGet());
testSuite = new TestSuite();
testSuite.addTest(testCase);
@@ -308,9 +310,8 @@ public class RunTestSuiteTaskTest {
componentRegistry = new ComponentRegistry();
componentRegistry.addAgent("127.0.0.1", "127.0.0.1");
componentRegistry.addWorkers(componentRegistry.getFirstAgent().getAddress(), singletonList(workerProcessSettings));
- componentRegistry.addTests(testSuite);
- failureCollector = new FailureCollector(outputDirectory);
+ failureCollector = new FailureCollector(outputDirectory, componentRegistry);
PerformanceStatsCollector performanceStatsCollector = new PerformanceStatsCollector();
TestPhaseListeners testPhaseListeners = new TestPhaseListeners();
@@ -438,7 +439,7 @@ public class RunTestSuiteTaskTest {
if (finishWorkerLatch != null) {
await(finishWorkerLatch);
FailureOperation operation = new FailureOperation("Worker finished", WORKER_FINISHED, workerAddress, "127.0.0.1",
- "127.0.0.1:5701", "workerId", "testId", testSuite, "stacktrace");
+ "127.0.0.1:5701", "workerId", "testId", "stacktrace");
failureCollector.notify(operation);
}
}
diff --git a/simulator/src/test/java/com/hazelcast/simulator/protocol/ProtocolUtil.java b/simulator/src/test/java/com/hazelcast/simulator/protocol/ProtocolUtil.java
index 0c91c3cb1..400adbca2 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/protocol/ProtocolUtil.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/protocol/ProtocolUtil.java
@@ -19,6 +19,7 @@ import com.hazelcast.simulator.protocol.operation.IntegrationTestOperation;
import com.hazelcast.simulator.protocol.operation.SimulatorOperation;
import com.hazelcast.simulator.protocol.processors.CoordinatorOperationProcessor;
import com.hazelcast.simulator.protocol.processors.TestOperationProcessor;
+import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.utils.TestUtils;
import com.hazelcast.simulator.utils.ThreadSpawner;
import com.hazelcast.simulator.worker.Worker;
@@ -163,7 +164,8 @@ class ProtocolUtil {
TestPhaseListeners testPhaseListeners = new TestPhaseListeners();
PerformanceStatsCollector performanceStatsCollector = new PerformanceStatsCollector();
File outputDirectory = TestUtils.createTmpDirectory();
- FailureCollector failureCollector = new FailureCollector(outputDirectory);
+ ComponentRegistry componentRegistry = new ComponentRegistry();
+ FailureCollector failureCollector = new FailureCollector(outputDirectory, componentRegistry);
CoordinatorOperationProcessor operationProcessor = new CoordinatorOperationProcessor(
null, failureCollector,testPhaseListeners,performanceStatsCollector);
CoordinatorConnector coordinatorConnector = new CoordinatorConnector(operationProcessor, coordinatorPort);
diff --git a/simulator/src/test/java/com/hazelcast/simulator/protocol/operation/FailureOperationTest.java b/simulator/src/test/java/com/hazelcast/simulator/protocol/operation/FailureOperationTest.java
index 03be958fc..49d9d76a2 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/protocol/operation/FailureOperationTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/protocol/operation/FailureOperationTest.java
@@ -35,7 +35,7 @@ public class FailureOperationTest {
cause = new TestException("expected exception");
operation = new FailureOperation("FailureOperationTest", WORKER_EXCEPTION, workerAddress, null, cause);
fullOperation = new FailureOperation("FailureOperationTest", WORKER_EXCEPTION, workerAddress, null, "127.0.0.1:5701",
- "C_A1_W1-member", TEST_ID, testSuite, null);
+ "C_A1_W1-member", TEST_ID, null).setTestSuite(testSuite);
}
@Test
diff --git a/simulator/src/test/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessorTest.java b/simulator/src/test/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessorTest.java
index 353d8c0b2..b65815611 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessorTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessorTest.java
@@ -11,7 +11,6 @@ import com.hazelcast.simulator.protocol.connector.AgentConnector;
import com.hazelcast.simulator.protocol.core.ResponseType;
import com.hazelcast.simulator.protocol.operation.CreateTestOperation;
import com.hazelcast.simulator.protocol.operation.CreateWorkerOperation;
-import com.hazelcast.simulator.protocol.operation.InitTestSuiteOperation;
import com.hazelcast.simulator.protocol.operation.IntegrationTestOperation;
import com.hazelcast.simulator.protocol.operation.SimulatorOperation;
import com.hazelcast.simulator.protocol.operation.StartTimeoutDetectionOperation;
@@ -79,7 +78,6 @@ public class AgentOperationProcessorTest {
agent = mock(Agent.class);
when(agent.getAddressIndex()).thenReturn(1);
when(agent.getPublicAddress()).thenReturn("127.0.0.1");
- when(agent.getTestSuite()).thenReturn(testSuite);
when(agent.getSessionId()).thenReturn("AgentOperationProcessorTest");
when(agent.getSessionDirectory()).thenReturn(sessionDir);
when(agent.getAgentConnector()).thenReturn(agentConnector);
@@ -112,16 +110,6 @@ public class AgentOperationProcessorTest {
assertEquals(UNSUPPORTED_OPERATION_ON_THIS_PROCESSOR, responseType);
}
- @Test
- public void testInitTestSuiteOperation() throws Exception {
- SimulatorOperation operation = new InitTestSuiteOperation(testSuite);
- ResponseType responseType = processor.processOperation(getOperationType(operation), operation, COORDINATOR);
-
- assertEquals(SUCCESS, responseType);
-
- verify(agent).setTestSuite(any(TestSuite.class));
- }
-
@Test(timeout = DEFAULT_TEST_TIMEOUT)
public void testCreateWorkerOperation() throws Exception {
ResponseType responseType = testCreateWorkerOperation(false, DEFAULT_STARTUP_TIMEOUT);
diff --git a/simulator/src/test/java/com/hazelcast/simulator/protocol/processors/CoordinatorOperationProcessorTest.java b/simulator/src/test/java/com/hazelcast/simulator/protocol/processors/CoordinatorOperationProcessorTest.java
index d80f3616e..2321fb208 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/protocol/processors/CoordinatorOperationProcessorTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/protocol/processors/CoordinatorOperationProcessorTest.java
@@ -14,6 +14,7 @@ import com.hazelcast.simulator.protocol.operation.IntegrationTestOperation;
import com.hazelcast.simulator.protocol.operation.PerformanceStatsOperation;
import com.hazelcast.simulator.protocol.operation.PhaseCompletedOperation;
import com.hazelcast.simulator.protocol.operation.SimulatorOperation;
+import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.test.TestException;
import com.hazelcast.simulator.utils.TestUtils;
import com.hazelcast.simulator.worker.performance.PerformanceStats;
@@ -54,6 +55,7 @@ public class CoordinatorOperationProcessorTest implements FailureListener {
private CoordinatorOperationProcessor processor;
private File outputDirectory;
+ private ComponentRegistry componentRegistry;
@Before
public void setUp() {
@@ -62,8 +64,9 @@ public class CoordinatorOperationProcessorTest implements FailureListener {
testPhaseListeners = new TestPhaseListeners();
performanceStatsCollector = new PerformanceStatsCollector();
+ componentRegistry = new ComponentRegistry();
outputDirectory = TestUtils.createTmpDirectory();
- failureCollector = new FailureCollector(outputDirectory);
+ failureCollector = new FailureCollector(outputDirectory, componentRegistry);
processor = new CoordinatorOperationProcessor(null, failureCollector, testPhaseListeners,
performanceStatsCollector);
@@ -82,17 +85,6 @@ public class CoordinatorOperationProcessorTest implements FailureListener {
assertEquals(UNSUPPORTED_OPERATION_ON_THIS_PROCESSOR, responseType);
}
-// @Test
-// public void processException() {
-// TestException exception = new TestException("expected exception");
-// ExceptionOperation operation = new ExceptionOperation(WORKER_EXCEPTION.name(), "C_A1_W1", "FailingTest", exception);
-//
-// ResponseType responseType = processor.process(operation, workerAddress);
-//
-// assertEquals(SUCCESS, responseType);
-// //assertEquals(1, exceptionLogger.getExceptionCount());
-// }
-
@Test
public void processFailureOperation() {
failureCollector.addListener(this);
diff --git a/simulator/src/test/java/com/hazelcast/simulator/protocol/registry/ComponentRegistryTest.java b/simulator/src/test/java/com/hazelcast/simulator/protocol/registry/ComponentRegistryTest.java
index 9f78641eb..e1ce4b26b 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/protocol/registry/ComponentRegistryTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/protocol/registry/ComponentRegistryTest.java
@@ -8,6 +8,7 @@ import com.hazelcast.simulator.protocol.core.SimulatorAddress;
import com.hazelcast.simulator.protocol.registry.AgentData.AgentWorkerMode;
import com.hazelcast.simulator.utils.CommandLineExitException;
import com.hazelcast.simulator.common.WorkerType;
+import junit.framework.TestResult;
import org.junit.Test;
import java.util.ArrayList;
@@ -16,6 +17,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import static junit.framework.TestCase.assertSame;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -280,20 +282,30 @@ public class ComponentRegistryTest {
testSuite.addTest(new TestCase("Test2"));
testSuite.addTest(new TestCase("Test3"));
- componentRegistry.addTests(testSuite);
+ List<TestData> tests = componentRegistry.addTests(testSuite);
+ assertEquals(3, tests.size());
assertEquals(3, componentRegistry.testCount());
}
@Test
public void testRemoveTests() {
- TestSuite testSuite = new TestSuite();
- testSuite.addTest(new TestCase("Test1"));
- componentRegistry.addTests(testSuite);
+ TestSuite testSuite1 = new TestSuite()
+ .addTest(new TestCase("Test1a"))
+ .addTest(new TestCase("Test1b"));
+ componentRegistry.addTests(testSuite1);
+
+ TestSuite testSuite2 = new TestSuite()
+ .addTest(new TestCase("Test2a"))
+ .addTest(new TestCase("Test2b"));
+ componentRegistry.addTests(testSuite2);
- componentRegistry.removeTests();
+ componentRegistry.removeTests(testSuite1);
- assertEquals(0, componentRegistry.testCount());
+ assertEquals(2, componentRegistry.testCount());
+ for(TestData testData: componentRegistry.getTests()){
+ assertSame(testSuite2, testData.getTestSuite());
+ }
}
@Test | ['simulator/src/main/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessor.java', 'simulator/src/main/java/com/hazelcast/simulator/agent/FailureHandlerImpl.java', 'simulator/src/test/java/com/hazelcast/simulator/coordinator/FailureCollectorTest.java', 'simulator/src/main/java/com/hazelcast/simulator/protocol/operation/OperationType.java', 'simulator/src/main/java/com/hazelcast/simulator/protocol/registry/TestData.java', 'simulator/src/test/java/com/hazelcast/simulator/protocol/processors/CoordinatorOperationProcessorTest.java', 'simulator/src/main/java/com/hazelcast/simulator/coordinator/FailureCollector.java', 'simulator/src/test/java/com/hazelcast/simulator/coordinator/RunTestSuiteTaskTest.java', 'simulator/src/test/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessorTest.java', 'simulator/src/test/java/com/hazelcast/simulator/protocol/ProtocolUtil.java', 'simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java', 'simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java', 'simulator/src/test/java/com/hazelcast/simulator/agent/FailureHandlerImplTest.java', 'simulator/src/test/java/com/hazelcast/simulator/agent/AgentSmokeTest.java', 'simulator/src/main/java/com/hazelcast/simulator/protocol/operation/FailureOperation.java', 'simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java', 'simulator/src/test/java/com/hazelcast/simulator/protocol/operation/FailureOperationTest.java', 'simulator/src/main/java/com/hazelcast/simulator/protocol/registry/ComponentRegistry.java', 'simulator/src/test/java/com/hazelcast/simulator/protocol/registry/ComponentRegistryTest.java', 'simulator/src/main/java/com/hazelcast/simulator/protocol/operation/InitTestSuiteOperation.java'] | {'.java': 20} | 20 | 20 | 0 | 0 | 20 | 982,145 | 196,691 | 25,645 | 225 | 7,106 | 1,407 | 160 | 11 | 101 | 18 | 23 | 2 | 0 | 0 | 1970-01-01T00:24:32 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 
41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
345 | hazelcast/hazelcast-simulator/1128/1123 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/1123 | https://github.com/hazelcast/hazelcast-simulator/pull/1128 | https://github.com/hazelcast/hazelcast-simulator/pull/1128 | 1 | fix | Remote: when fail fast, test not aborting | When deliberately causing a test failure by killing the members (and having the load generated by a client),the testcase runner doesn't complete
```
INFO 14:49:10 AtomicLongTest Running 00d 00h 00m 20s ( 6.67%) 54,600 ops 3,409.70 ops/s 292 µs (avg) 2,619 µs (99.9th) 19,628 µs (max)
INFO 14:49:15 Valid connection from /127.0.0.1:39036 (magic bytes found)
INFO 14:49:15 Killing working....
INFO 14:49:15 Kill send to worker [C_A2_W1]
INFO 14:49:20 AtomicLongTest Running 00d 00h 00m 30s ( 10.00%) 92,414 ops 3,781.40 ops/s 263 µs (avg) 1,140 µs (99.9th) 21,725 µs (max)
INFO 14:49:30 AtomicLongTest Running 00d 00h 00m 40s ( 13.33%) 92,915 ops 50.10 ops/s 295 µs (avg) 2,059 µs (99.9th) 2,772 µs (max)
INFO 14:49:40 AtomicLongTest Running 00d 00h 00m 50s ( 16.67%) 153,606 ops 6,069.10 ops/s 326 µs (avg) 332 µs (99.9th) 10,015,997 µs (max)
INFO 14:49:41 Valid connection from /127.0.0.1:39208 (magic bytes found)
INFO 14:49:41 Killing working....
INFO 14:49:41 Kill send to worker [C_A1_W1]
ERROR 14:49:47 Failure #1 C_A1_W3 run-AtomicLongTest WORKER_EXCEPTION[com.hazelcast.spi.exception.TargetDisconnectedException: Disconnecting from member [10.30.221.14]:5701 due to heartbeat problems. Current time: 2016-08-23 07:49:47.151. Last heartbeat requested: 2016-08-23 07:49:30.151. Last heartbeat received: 2016-08-23 07:49:30.153. Last read: 2016-08-23 07:49:46.307. Connection ClientConnection{live=false, connectionId=2, socketChannel=DefaultSocketChannelWrapper{socketChannel=java.nio.channels.SocketChannel[closed]}, remoteEndpoint=[10.30.221.14]:5701, lastReadTime=2016-08-23 07:49:46.307, lastWriteTime=2016-08-23 07:49:46.305, closedTime=2016-08-23 07:49:46.307, lastHeartbeatRequested=2016-08-23 07:49:30.151, lastHeartbeatReceived=2016-08-23 07:49:30.153}]
INFO 14:49:50 AtomicLongTest Running 00d 00h 01m 00s ( 20.00%) 195,126 ops 4,151.58 ops/s 145 µs (avg) 256 µs (99.9th) 27,918 µs (max)
INFO 14:49:50 AtomicLongTest Critical failure detected, aborting run phase
INFO 14:49:51 AtomicLongTest Test finished running
INFO 14:49:51 AtomicLongTest Executing Test stop
ERROR 14:52:51 Failure #2 C_A1_W3 WORKER_TIMEOUT[Worker has not sent a message for 180 seconds]
```
I guess it is waiting for the old member count, not the current member count.
| 86e080b937212589215b80309a3bef116e9da3f9 | c3e7b452eb6aa26fe305dd41ab7ed91641aa9da7 | https://github.com/hazelcast/hazelcast-simulator/compare/86e080b937212589215b80309a3bef116e9da3f9...c3e7b452eb6aa26fe305dd41ab7ed91641aa9da7 | diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java b/simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java
index 49616abb7..03c687d95 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java
@@ -55,7 +55,7 @@ public class Agent {
private final String cloudCredential;
private final AgentConnector agentConnector;
- private final FailureSenderImpl failureSender;
+ private final FailureHandlerImpl failureSender;
private final WorkerProcessFailureMonitor workerProcessFailureMonitor;
private volatile TestSuite testSuite;
@@ -74,7 +74,7 @@ public class Agent {
this.cloudCredential = cloudCredential;
this.agentConnector = AgentConnector.createInstance(this, workerProcessManager, port, threadPoolSize);
- this.failureSender = new FailureSenderImpl(publicAddress, agentConnector);
+ this.failureSender = new FailureHandlerImpl(publicAddress, agentConnector);
this.workerProcessFailureMonitor = new WorkerProcessFailureMonitor(failureSender, workerProcessManager,
workerLastSeenTimeoutSeconds);
diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/FailureSenderImpl.java b/simulator/src/main/java/com/hazelcast/simulator/agent/FailureHandlerImpl.java
similarity index 82%
rename from simulator/src/main/java/com/hazelcast/simulator/agent/FailureSenderImpl.java
rename to simulator/src/main/java/com/hazelcast/simulator/agent/FailureHandlerImpl.java
index 45f1f3fa7..e9b341ab7 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/FailureSenderImpl.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/FailureHandlerImpl.java
@@ -15,6 +15,7 @@
*/
package com.hazelcast.simulator.agent;
+import com.hazelcast.simulator.agent.workerprocess.FailureHandler;
import com.hazelcast.simulator.agent.workerprocess.WorkerProcess;
import com.hazelcast.simulator.common.FailureType;
import com.hazelcast.simulator.common.TestSuite;
@@ -30,9 +31,9 @@ import org.apache.log4j.Logger;
import static com.hazelcast.simulator.protocol.core.SimulatorAddress.COORDINATOR;
import static java.lang.String.format;
-class FailureSenderImpl implements FailureSender {
+class FailureHandlerImpl implements FailureHandler {
- private static final Logger LOGGER = Logger.getLogger(FailureSenderImpl.class);
+ private static final Logger LOGGER = Logger.getLogger(FailureHandlerImpl.class);
private final String agentAddress;
private final AgentConnector agentConnector;
@@ -41,7 +42,7 @@ class FailureSenderImpl implements FailureSender {
private int failureCount;
- FailureSenderImpl(String agentAddress, AgentConnector agentConnector) {
+ FailureHandlerImpl(String agentAddress, AgentConnector agentConnector) {
this.agentAddress = agentAddress;
this.agentConnector = agentConnector;
}
@@ -51,14 +52,26 @@ class FailureSenderImpl implements FailureSender {
}
@Override
- public boolean sendFailureOperation(String message,
- FailureType type,
- WorkerProcess workerProcess,
- String testId,
- String cause) {
- boolean sentSuccessfully = true;
+ public boolean handle(String message, FailureType type, WorkerProcess workerProcess, String testId, String cause) {
+ boolean send = true;
+
+ if (!workerProcess.isFailureIgnored()) {
+ send = send(message, type, workerProcess, testId, cause);
+ }
+
+ if (type.isWorkerFinishedFailure()) {
+ SimulatorAddress workerAddress = workerProcess.getAddress();
+ unblockPendingFutures(workerAddress);
+ removeFinishedWorker(workerAddress, type);
+ }
+
+ return send;
+ }
+
+ private boolean send(String message, FailureType type, WorkerProcess workerProcess, String testId, String cause) {
SimulatorAddress workerAddress = workerProcess.getAddress();
String workerId = workerProcess.getId();
+
FailureOperation operation = new FailureOperation(message, type, workerAddress, agentAddress,
workerProcess.getHazelcastAddress(), workerId, testId, testSuite, cause);
@@ -69,28 +82,23 @@ class FailureSenderImpl implements FailureSender {
operation.getLogMessage(++failureCount)));
}
+ boolean send = true;
try {
Response response = agentConnector.write(COORDINATOR, operation);
ResponseType firstErrorResponseType = response.getFirstErrorResponseType();
if (firstErrorResponseType != ResponseType.SUCCESS) {
LOGGER.error(format("Could not send failure to coordinator: %s", firstErrorResponseType));
- sentSuccessfully = false;
+ send = false;
} else if (!type.isPoisonPill()) {
LOGGER.info("Failure successfully sent to Coordinator!");
}
} catch (SimulatorProtocolException e) {
if (!(e.getCause() instanceof InterruptedException)) {
LOGGER.error(format("Could not send failure to coordinator! %s", operation.getFileMessage()), e);
- sentSuccessfully = false;
+ send = false;
}
}
-
- if (type.isWorkerFinishedFailure()) {
- unblockPendingFutures(workerAddress);
- removeFinishedWorker(workerAddress, type);
- }
-
- return sentSuccessfully;
+ return send;
}
private void unblockPendingFutures(SimulatorAddress workerAddress) {
diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/FailureSender.java b/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/FailureHandler.java
similarity index 72%
rename from simulator/src/main/java/com/hazelcast/simulator/agent/FailureSender.java
rename to simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/FailureHandler.java
index 3f18e7884..d5a54d5e4 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/FailureSender.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/FailureHandler.java
@@ -13,12 +13,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.hazelcast.simulator.agent;
+package com.hazelcast.simulator.agent.workerprocess;
-import com.hazelcast.simulator.agent.workerprocess.WorkerProcess;
import com.hazelcast.simulator.common.FailureType;
-public interface FailureSender {
+public interface FailureHandler {
- boolean sendFailureOperation(String message, FailureType type, WorkerProcess workerProcess, String testId, String cause);
+ boolean handle(String message, FailureType type, WorkerProcess workerProcess, String testId, String cause);
}
diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcess.java b/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcess.java
index 6e25e4a13..cd2892aec 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcess.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcess.java
@@ -33,6 +33,7 @@ public class WorkerProcess {
private volatile boolean isFinished;
private volatile Process process;
private volatile String hzAddress;
+ private volatile boolean failureIgnored;
public WorkerProcess(SimulatorAddress address, String id, File workerHome) {
this.address = address;
@@ -95,4 +96,12 @@ public class WorkerProcess {
public void setHzAddress(String memberAddress) {
this.hzAddress = memberAddress;
}
+
+ public boolean isFailureIgnored() {
+ return failureIgnored;
+ }
+
+ public void setFailureIgnored(boolean failureIgnored) {
+ this.failureIgnored = failureIgnored;
+ }
}
diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitor.java b/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitor.java
index 0c7db04c2..73d30dc3e 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitor.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitor.java
@@ -15,13 +15,11 @@
*/
package com.hazelcast.simulator.agent.workerprocess;
-import com.hazelcast.simulator.agent.FailureSender;
import com.hazelcast.simulator.common.FailureType;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.FilenameFilter;
-import java.util.concurrent.TimeUnit;
import static com.hazelcast.simulator.common.FailureType.WORKER_EXCEPTION;
import static com.hazelcast.simulator.common.FailureType.WORKER_EXIT;
@@ -34,6 +32,7 @@ import static com.hazelcast.simulator.utils.FileUtils.fileAsText;
import static com.hazelcast.simulator.utils.FileUtils.rename;
import static com.hazelcast.simulator.utils.FormatUtils.NEW_LINE;
import static java.lang.String.format;
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
public class WorkerProcessFailureMonitor {
@@ -44,17 +43,17 @@ public class WorkerProcessFailureMonitor {
private final MonitorThread monitorThread;
- public WorkerProcessFailureMonitor(FailureSender failureSender,
+ public WorkerProcessFailureMonitor(FailureHandler failureHandler,
WorkerProcessManager workerProcessManager,
int lastSeenTimeoutSeconds) {
- this(failureSender, workerProcessManager, lastSeenTimeoutSeconds, DEFAULT_CHECK_INTERVAL_MILLIS);
+ this(failureHandler, workerProcessManager, lastSeenTimeoutSeconds, DEFAULT_CHECK_INTERVAL_MILLIS);
}
- WorkerProcessFailureMonitor(FailureSender failureSender,
+ WorkerProcessFailureMonitor(FailureHandler failureHandler,
WorkerProcessManager workerProcessManager,
int lastSeenTimeoutSeconds,
int checkIntervalMillis) {
- monitorThread = new MonitorThread(failureSender, workerProcessManager, lastSeenTimeoutSeconds, checkIntervalMillis);
+ monitorThread = new MonitorThread(failureHandler, workerProcessManager, lastSeenTimeoutSeconds, checkIntervalMillis);
}
public void start() {
@@ -83,7 +82,7 @@ public class WorkerProcessFailureMonitor {
private final class MonitorThread extends Thread {
- private final FailureSender failureSender;
+ private final FailureHandler failureHandler;
private final WorkerProcessManager workerProcessManager;
private final int lastSeenTimeoutSeconds;
private final int checkIntervalMillis;
@@ -91,14 +90,14 @@ public class WorkerProcessFailureMonitor {
private volatile boolean running = true;
private volatile boolean detectTimeouts;
- private MonitorThread(FailureSender failureSender,
+ private MonitorThread(FailureHandler failureHandler,
WorkerProcessManager workerProcessManager,
int lastSeenTimeoutSeconds,
int checkIntervalMillis) {
super("WorkerJvmFailureMonitorThread");
setDaemon(true);
- this.failureSender = failureSender;
+ this.failureHandler = failureHandler;
this.workerProcessManager = workerProcessManager;
this.lastSeenTimeoutSeconds = lastSeenTimeoutSeconds;
this.checkIntervalMillis = checkIntervalMillis;
@@ -161,7 +160,7 @@ public class WorkerProcessFailureMonitor {
}
// we delete or rename the exception file so that we don't detect the same exception again
- boolean send = failureSender.sendFailureOperation(
+ boolean send = failureHandler.handle(
"Worked ran into an unhandled exception", WORKER_EXCEPTION, workerProcess, testId, cause);
if (send) {
@@ -199,7 +198,7 @@ public class WorkerProcessFailureMonitor {
return;
}
- long elapsed = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - workerProcess.getLastSeen());
+ long elapsed = MILLISECONDS.toSeconds(System.currentTimeMillis() - workerProcess.getLastSeen());
if (elapsed > 0 && elapsed % lastSeenTimeoutSeconds == 0) {
sendFailureOperation(
format("Worker has not sent a message for %d seconds", elapsed), WORKER_TIMEOUT, workerProcess);
@@ -229,7 +228,7 @@ public class WorkerProcessFailureMonitor {
}
private void sendFailureOperation(String message, FailureType type, WorkerProcess workerProcess) {
- failureSender.sendFailureOperation(message, type, workerProcess, null, null);
+ failureHandler.handle(message, type, workerProcess, null, null);
}
}
diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessManager.java b/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessManager.java
index a02cf8721..6f45b351c 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessManager.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessManager.java
@@ -44,8 +44,10 @@ public class WorkerProcessManager {
}
public void ignore(SimulatorAddress workerAddress) {
- LOGGER.info("Dropping worker [" + workerAddress + "]");
- workerProcesses.remove(workerAddress);
+ WorkerProcess workerProcess = workerProcesses.get(workerAddress);
+ if (workerProcess != null) {
+ workerProcess.setFailureIgnored(true);
+ }
}
public Collection<WorkerProcess> getWorkerProcesses() {
diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java
index a5517d049..c72eca66c 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java
@@ -428,9 +428,13 @@ public final class Coordinator {
}
SimulatorAddress memberAddress = randomMember.getAddress();
+
componentRegistry.removeWorker(memberAddress);
+
coordinatorConnector.write(memberAddress.getParent(), new IgnoreWorkerFailureOperation(memberAddress));
+
coordinatorConnector.writeAsync(memberAddress, new KillWorkerOperation());
+
LOGGER.info("Kill send to worker [" + memberAddress + "]");
}
diff --git a/simulator/src/test/java/com/hazelcast/simulator/agent/FailureSenderImplTest.java b/simulator/src/test/java/com/hazelcast/simulator/agent/FailureHandlerImplTest.java
similarity index 84%
rename from simulator/src/test/java/com/hazelcast/simulator/agent/FailureSenderImplTest.java
rename to simulator/src/test/java/com/hazelcast/simulator/agent/FailureHandlerImplTest.java
index 82b74422a..d16d749d5 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/agent/FailureSenderImplTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/agent/FailureHandlerImplTest.java
@@ -31,10 +31,10 @@ import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-public class FailureSenderImplTest {
+public class FailureHandlerImplTest {
private static final String FAILURE_MESSAGE = "failure message";
- private static final String SESSION_ID = "FailureSenderImplTest";
+ private static final String SESSION_ID = "FailureHandlerImplTest";
private static final String CAUSE = "any stacktrace";
private SimulatorAddress workerAddress;
@@ -42,7 +42,7 @@ public class FailureSenderImplTest {
private AgentConnector agentConnector;
- private FailureSenderImpl failureSender;
+ private FailureHandlerImpl failureSender;
private ResponseFuture responseFuture;
@@ -64,13 +64,13 @@ public class FailureSenderImplTest {
TestSuite testSuite = new TestSuite();
- failureSender = new FailureSenderImpl("127.0.0.1", agentConnector);
+ failureSender = new FailureHandlerImpl("127.0.0.1", agentConnector);
failureSender.setTestSuite(testSuite);
}
@Test
public void testSendFailureOperation() {
- boolean success = failureSender.sendFailureOperation(FAILURE_MESSAGE, WORKER_EXCEPTION, workerProcess, SESSION_ID, CAUSE);
+ boolean success = failureSender.handle(FAILURE_MESSAGE, WORKER_EXCEPTION, workerProcess, SESSION_ID, CAUSE);
assertTrue(success);
assertFalse(responseFuture.isDone());
@@ -78,7 +78,7 @@ public class FailureSenderImplTest {
@Test(timeout = 10000)
public void testSendFailureOperation_whenWorkerIsFinished_thenUnblockResponseFutureByFailure() throws Exception {
- boolean success = failureSender.sendFailureOperation(FAILURE_MESSAGE, WORKER_FINISHED, workerProcess, SESSION_ID, CAUSE);
+ boolean success = failureSender.handle(FAILURE_MESSAGE, WORKER_FINISHED, workerProcess, SESSION_ID, CAUSE);
assertTrue(success);
assertTrue(responseFuture.isDone());
@@ -90,7 +90,7 @@ public class FailureSenderImplTest {
Response failureResponse = new Response(1, COORDINATOR, workerAddress, FAILURE_COORDINATOR_NOT_FOUND);
when(agentConnector.write(any(SimulatorAddress.class), any(SimulatorOperation.class))).thenReturn(failureResponse);
- boolean success = failureSender.sendFailureOperation(FAILURE_MESSAGE, WORKER_EXCEPTION, workerProcess, SESSION_ID, CAUSE);
+ boolean success = failureSender.handle(FAILURE_MESSAGE, WORKER_EXCEPTION, workerProcess, SESSION_ID, CAUSE);
assertFalse(success);
assertFalse(responseFuture.isDone());
@@ -101,7 +101,7 @@ public class FailureSenderImplTest {
when(agentConnector.write(any(SimulatorAddress.class), any(SimulatorOperation.class)))
.thenThrow(new SimulatorProtocolException("expected exception"));
- boolean success = failureSender.sendFailureOperation(FAILURE_MESSAGE, NETTY_EXCEPTION, workerProcess, SESSION_ID, CAUSE);
+ boolean success = failureSender.handle(FAILURE_MESSAGE, NETTY_EXCEPTION, workerProcess, SESSION_ID, CAUSE);
assertFalse(success);
assertFalse(responseFuture.isDone());
diff --git a/simulator/src/test/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitorTest.java b/simulator/src/test/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitorTest.java
index 4d1d8aee8..582470d42 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitorTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitorTest.java
@@ -1,6 +1,5 @@
package com.hazelcast.simulator.agent.workerprocess;
-import com.hazelcast.simulator.agent.FailureSender;
import com.hazelcast.simulator.common.FailureType;
import com.hazelcast.simulator.protocol.core.Response;
import com.hazelcast.simulator.protocol.core.SimulatorAddress;
@@ -57,7 +56,7 @@ public class WorkerProcessFailureMonitorTest {
private static int addressIndex;
- private FailureSender failureSender;
+ private FailureHandler failureHandler;
private WorkerProcessManager workerProcessManager;
@@ -70,15 +69,15 @@ public class WorkerProcessFailureMonitorTest {
simulatorHome = setupFakeEnvironment();
workersHome = new File(simulatorHome, "workers");
- failureSender = mock(FailureSender.class);
- when(failureSender.sendFailureOperation(
+ failureHandler = mock(FailureHandler.class);
+ when(failureHandler.handle(
anyString(), any(FailureType.class), any(WorkerProcess.class), any(String.class), any(String.class)))
.thenReturn(true);
workerProcessManager = new WorkerProcessManager();
workerProcessFailureMonitor = new WorkerProcessFailureMonitor(
- failureSender,
+ failureHandler,
workerProcessManager,
DEFAULT_LAST_SEEN_TIMEOUT_SECONDS,
DEFAULT_CHECK_INTERVAL);
@@ -94,18 +93,18 @@ public class WorkerProcessFailureMonitorTest {
@Test
public void testConstructor() {
- workerProcessFailureMonitor = new WorkerProcessFailureMonitor(failureSender, workerProcessManager,
+ workerProcessFailureMonitor = new WorkerProcessFailureMonitor(failureHandler, workerProcessManager,
DEFAULT_LAST_SEEN_TIMEOUT_SECONDS);
workerProcessFailureMonitor.start();
- verifyZeroInteractions(failureSender);
+ verifyZeroInteractions(failureHandler);
}
@Test
public void testRun_shouldSendNoFailures() {
sleepMillis(DEFAULT_SLEEP_TIME);
- verifyZeroInteractions(failureSender);
+ verifyZeroInteractions(failureHandler);
}
@Test(timeout = DEFAULT_TIMEOUT)
@@ -123,7 +122,7 @@ public class WorkerProcessFailureMonitorTest {
assertTrueEventually(new AssertTask() {
@Override
public void run() throws Exception {
- assertFailureType(failureSender, WORKER_OOM);
+ assertFailureType(failureHandler, WORKER_OOM);
}
});
}
@@ -132,7 +131,7 @@ public class WorkerProcessFailureMonitorTest {
public void testRun_shouldContinueAfterErrorResponse() {
Response failOnceResponse = mock(Response.class);
when(failOnceResponse.getFirstErrorResponseType()).thenReturn(FAILURE_COORDINATOR_NOT_FOUND).thenReturn(SUCCESS);
- when(failureSender.sendFailureOperation(
+ when(failureHandler.handle(
anyString(), any(FailureType.class), any(WorkerProcess.class), any(String.class), any(String.class)))
.thenReturn(false);
@@ -140,12 +139,12 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureTypeAtLeastOnce(failureSender, WORKER_EXIT);
+ assertFailureTypeAtLeastOnce(failureHandler, WORKER_EXIT);
}
@Test
public void testRun_shouldContinueAfterSendFailure() {
- when(failureSender.sendFailureOperation(
+ when(failureHandler.handle(
anyString(), any(FailureType.class), any(WorkerProcess.class), any(String.class), any(String.class)))
.thenReturn(false)
.thenReturn(true);
@@ -154,7 +153,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureTypeAtLeastOnce(failureSender, WORKER_EXIT);
+ assertFailureTypeAtLeastOnce(failureHandler, WORKER_EXIT);
}
@Test
@@ -168,7 +167,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureTypeAtLeastOnce(failureSender, WORKER_EXCEPTION);
+ assertFailureTypeAtLeastOnce(failureHandler, WORKER_EXCEPTION);
assertThatExceptionFileDoesNotExist(exceptionFile);
}
@@ -181,7 +180,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureTypeAtLeastOnce(failureSender, WORKER_EXCEPTION);
+ assertFailureTypeAtLeastOnce(failureHandler, WORKER_EXCEPTION);
assertThatExceptionFileDoesNotExist(exceptionFile);
}
@@ -194,13 +193,13 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureTypeAtLeastOnce(failureSender, WORKER_EXCEPTION);
+ assertFailureTypeAtLeastOnce(failureHandler, WORKER_EXCEPTION);
assertThatExceptionFileDoesNotExist(exceptionFile);
}
@Test
public void testRun_shouldDetectException_shouldRenameFileIfFailureOperationCouldNotBeSent_withSingleErrorResponse() {
- when(failureSender.sendFailureOperation(
+ when(failureHandler.handle(
anyString(), any(FailureType.class), any(WorkerProcess.class), any(String.class), any(String.class)))
.thenReturn(false)
.thenReturn(true);
@@ -212,14 +211,14 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureTypeAtLeastOnce(failureSender, WORKER_EXCEPTION);
+ assertFailureTypeAtLeastOnce(failureHandler, WORKER_EXCEPTION);
assertThatExceptionFileDoesNotExist(exceptionFile);
assertThatRenamedExceptionFileExists(exceptionFile);
}
@Test
public void testRun_shouldDetectException_shouldRenameFileIfFailureOperationCouldNotBeSent_withContinuousErrorResponse() {
- when(failureSender.sendFailureOperation(
+ when(failureHandler.handle(
anyString(), any(FailureType.class), any(WorkerProcess.class), any(String.class), any(String.class)))
.thenReturn(false);
@@ -229,7 +228,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureTypeAtLeastOnce(failureSender, WORKER_EXCEPTION);
+ assertFailureTypeAtLeastOnce(failureHandler, WORKER_EXCEPTION);
assertThatExceptionFileDoesNotExist(exceptionFile);
assertThatRenamedExceptionFileExists(exceptionFile);
}
@@ -242,7 +241,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureType(failureSender, WORKER_OOM);
+ assertFailureType(failureHandler, WORKER_OOM);
}
@Test
@@ -253,7 +252,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureType(failureSender, WORKER_OOM);
+ assertFailureType(failureHandler, WORKER_OOM);
}
@Test
@@ -265,14 +264,14 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- assertFailureTypeAtLeastOnce(failureSender, WORKER_TIMEOUT);
+ assertFailureTypeAtLeastOnce(failureHandler, WORKER_TIMEOUT);
}
@Test
public void testRun_shouldNotDetectInactivity_ifDetectionDisabled() {
WorkerProcess workerProcess = addRunningWorkerProcess();
- workerProcessFailureMonitor = new WorkerProcessFailureMonitor(failureSender, workerProcessManager, -1,
+ workerProcessFailureMonitor = new WorkerProcessFailureMonitor(failureHandler, workerProcessManager, -1,
DEFAULT_CHECK_INTERVAL);
workerProcessFailureMonitor.start();
@@ -286,7 +285,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- verifyZeroInteractions(failureSender);
+ verifyZeroInteractions(failureHandler);
}
@Test
@@ -297,7 +296,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- verifyZeroInteractions(failureSender);
+ verifyZeroInteractions(failureHandler);
}
@Test
@@ -313,7 +312,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
- verifyZeroInteractions(failureSender);
+ verifyZeroInteractions(failureHandler);
}
@Test(timeout = DEFAULT_TIMEOUT)
@@ -325,7 +324,7 @@ public class WorkerProcessFailureMonitorTest {
} while (!workerProcess.isFinished());
assertTrue(workerProcess.isFinished());
- assertFailureType(failureSender, WORKER_FINISHED);
+ assertFailureType(failureHandler, WORKER_FINISHED);
}
@Test
@@ -335,7 +334,7 @@ public class WorkerProcessFailureMonitorTest {
sleepMillis(DEFAULT_SLEEP_TIME);
assertFalse(workerProcess.isFinished());
- assertFailureType(failureSender, WORKER_EXIT);
+ assertFailureType(failureHandler, WORKER_EXIT);
}
@Test
@@ -395,21 +394,21 @@ public class WorkerProcessFailureMonitorTest {
return exceptionFile;
}
- private void assertFailureType(FailureSender failureSender, FailureType failureType) {
- assertFailureTypeAtLeastOnce(failureSender, failureType, times(1));
+ private void assertFailureType(FailureHandler failureHandler, FailureType failureType) {
+ assertFailureTypeAtLeastOnce(failureHandler, failureType, times(1));
}
- private void assertFailureTypeAtLeastOnce(FailureSender failureSender, FailureType failureType) {
- assertFailureTypeAtLeastOnce(failureSender, failureType, atLeastOnce());
+ private void assertFailureTypeAtLeastOnce(FailureHandler failureHandler, FailureType failureType) {
+ assertFailureTypeAtLeastOnce(failureHandler, failureType, atLeastOnce());
}
- private void assertFailureTypeAtLeastOnce(FailureSender failureSender, FailureType failureType, VerificationMode mode) {
- verify(failureSender, mode).sendFailureOperation(anyString(),
+ private void assertFailureTypeAtLeastOnce(FailureHandler failureHandler, FailureType failureType, VerificationMode mode) {
+ verify(failureHandler, mode).handle(anyString(),
eq(failureType),
any(WorkerProcess.class),
any(String.class),
any(String.class));
- verifyNoMoreInteractions(failureSender);
+ verifyNoMoreInteractions(failureHandler);
}
private static void assertThatExceptionFileDoesNotExist(final File firstExceptionFile) { | ['simulator/src/main/java/com/hazelcast/simulator/agent/FailureSender.java', 'simulator/src/test/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitorTest.java', 'simulator/src/main/java/com/hazelcast/simulator/agent/FailureSenderImpl.java', 'simulator/src/test/java/com/hazelcast/simulator/agent/FailureSenderImplTest.java', 'simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessManager.java', 'simulator/src/main/java/com/hazelcast/simulator/agent/Agent.java', 'simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcessFailureMonitor.java', 'simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java', 'simulator/src/main/java/com/hazelcast/simulator/agent/workerprocess/WorkerProcess.java'] | {'.java': 9} | 9 | 9 | 0 | 0 | 9 | 955,217 | 191,509 | 24,947 | 223 | 2,545 | 451 | 46 | 7 | 2,410 | 277 | 892 | 23 | 0 | 1 | 1970-01-01T00:24:32 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
346 | hazelcast/hazelcast-simulator/855/854 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/854 | https://github.com/hazelcast/hazelcast-simulator/pull/855 | https://github.com/hazelcast/hazelcast-simulator/pull/855 | 1 | fixes | Race in PerformanceStateContainer | Because queue's are swapped, it can happen that a performancestate is added to a queue that moments later gets swapped out and processed. So you get an unprocessed PerformanceState on the queue which will never been seen
| 752e35a5e13d13210e8b2793dd0be0e18a5bf9cf | 74e25d4fc23495e8ddf8ac5729e48a1e6976413e | https://github.com/hazelcast/hazelcast-simulator/compare/752e35a5e13d13210e8b2793dd0be0e18a5bf9cf...74e25d4fc23495e8ddf8ac5729e48a1e6976413e | diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/PerformanceStateContainer.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/PerformanceStateContainer.java
index 210ab7998..282612137 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/PerformanceStateContainer.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/PerformanceStateContainer.java
@@ -31,7 +31,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
import static com.hazelcast.simulator.utils.FileUtils.appendText;
import static com.hazelcast.simulator.utils.FormatUtils.NEW_LINE;
@@ -62,15 +61,12 @@ public class PerformanceStateContainer {
private final ConcurrentMap<SimulatorAddress, ConcurrentMap<String, PerformanceState>> workerLastPerformanceStateMap
= new ConcurrentHashMap<SimulatorAddress, ConcurrentMap<String, PerformanceState>>();
- // holds an AtomicReference per testCaseId with a queue of WorkerPerformanceState instances over time
- // will be swapped with a new queue when read
- private final ConcurrentMap<String, AtomicReference<Queue<WorkerPerformanceState>>> testPerformanceStateQueueRefs
- = new ConcurrentHashMap<String, AtomicReference<Queue<WorkerPerformanceState>>>();
+ // holds a queue per test with pending PerformanceState messages. The key is the testId.
+ private final ConcurrentMap<String, Queue<WorkerPerformanceState>> pendingQueueMap
+ = new ConcurrentHashMap<String, Queue<WorkerPerformanceState>>();
public void init(String testCaseId) {
- Queue<WorkerPerformanceState> queue = new ConcurrentLinkedQueue<WorkerPerformanceState>();
- AtomicReference<Queue<WorkerPerformanceState>> queueReference = new AtomicReference<Queue<WorkerPerformanceState>>(queue);
- testPerformanceStateQueueRefs.put(testCaseId, queueReference);
+ pendingQueueMap.put(testCaseId, new ConcurrentLinkedQueue<WorkerPerformanceState>());
}
public void update(SimulatorAddress workerAddress, Map<String, PerformanceState> performanceStates) {
@@ -81,12 +77,9 @@ public class PerformanceStateContainer {
ConcurrentMap<String, PerformanceState> lastPerformanceStateMap = getOrCreateLastPerformanceStateMap(workerAddress);
lastPerformanceStateMap.put(testCaseId, performanceState);
- AtomicReference<Queue<WorkerPerformanceState>> queueReference = testPerformanceStateQueueRefs.get(testCaseId);
- if (queueReference != null) {
- Queue<WorkerPerformanceState> performanceStateQueue = queueReference.get();
- if (performanceStateQueue != null) {
- performanceStateQueue.add(new WorkerPerformanceState(workerAddress, performanceState));
- }
+ Queue<WorkerPerformanceState> pendingQueue = pendingQueueMap.get(testCaseId);
+ if (pendingQueue != null) {
+ pendingQueue.add(new WorkerPerformanceState(workerAddress, performanceState));
}
}
}
@@ -121,22 +114,25 @@ public class PerformanceStateContainer {
PerformanceState get(String testCaseId) {
// return if no queue of WorkerPerformanceState can be found (unknown testCaseId)
- AtomicReference<Queue<WorkerPerformanceState>> queueReference = testPerformanceStateQueueRefs.get(testCaseId);
- if (queueReference == null) {
+ Queue<WorkerPerformanceState> pendingQueue = pendingQueueMap.get(testCaseId);
+ if (pendingQueue == null) {
return new PerformanceState();
}
- // swap queue of WorkerPerformanceState for this testCaseId
- Queue<WorkerPerformanceState> oldQueue = queueReference.getAndSet(new ConcurrentLinkedQueue<WorkerPerformanceState>());
-
// aggregate the PerformanceState instances per Worker by maximum values (since from same Worker)
Map<SimulatorAddress, PerformanceState> workerPerformanceStateMap = new HashMap<SimulatorAddress, PerformanceState>();
- for (WorkerPerformanceState workerPerformanceState : oldQueue) {
- PerformanceState candidate = workerPerformanceStateMap.get(workerPerformanceState.simulatorAddress);
+ for (; ; ) {
+ WorkerPerformanceState pending = pendingQueue.poll();
+ if (pending == null) {
+ // we have drained the queue of pending work, so we are ready
+ break;
+ }
+
+ PerformanceState candidate = workerPerformanceStateMap.get(pending.simulatorAddress);
if (candidate == null) {
- workerPerformanceStateMap.put(workerPerformanceState.simulatorAddress, workerPerformanceState.performanceState);
+ workerPerformanceStateMap.put(pending.simulatorAddress, pending.performanceState);
} else {
- candidate.add(workerPerformanceState.performanceState, false);
+ candidate.add(pending.performanceState, false);
}
}
| ['simulator/src/main/java/com/hazelcast/simulator/coordinator/PerformanceStateContainer.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 929,484 | 188,147 | 24,481 | 238 | 3,145 | 562 | 42 | 1 | 221 | 37 | 44 | 2 | 0 | 0 | 1970-01-01T00:24:25 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
347 | hazelcast/hazelcast-simulator/707/699 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/699 | https://github.com/hazelcast/hazelcast-simulator/pull/707 | https://github.com/hazelcast/hazelcast-simulator/pull/707 | 1 | fix | InterruptedException in PerformanceThread.run() | Happens from time to time when using external clients (don't know if it will fail on regular tests as well). It's after the testsuite completes, so performance results and probes are written. Maybe it's just not shutdown properly.
```
FATAL 13:58:43 Exception in PerformanceThread.run()
java.lang.RuntimeException: java.lang.InterruptedException
at com.hazelcast.simulator.coordinator.remoting.AgentsClient.getAllFutures(AgentsClient.java:394)
at com.hazelcast.simulator.coordinator.remoting.AgentsClient.executeOnAllWorkersDetailed(AgentsClient.java:333)
at com.hazelcast.simulator.coordinator.PerformanceMonitor$PerformanceThread.checkPerformance(PerformanceMonitor.java:110)
at com.hazelcast.simulator.coordinator.PerformanceMonitor$PerformanceThread.run(PerformanceMonitor.java:99)
Caused by: java.lang.InterruptedException
at java.util.concurrent.FutureTask.awaitDone(FutureTask.java:404)
at java.util.concurrent.FutureTask.get(FutureTask.java:204)
at com.hazelcast.simulator.coordinator.remoting.AgentsClient.getAllFutures(AgentsClient.java:381)
... 3 more
```
| 76278f9af558824f99db2bbed90e23f072a48d2c | 9aedd37210ec8519c8821ff4666c20453128cf0b | https://github.com/hazelcast/hazelcast-simulator/compare/76278f9af558824f99db2bbed90e23f072a48d2c...9aedd37210ec8519c8821ff4666c20453128cf0b | diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/remoting/AgentsClient.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/remoting/AgentsClient.java
index cc5b5a738..b961bbe36 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/remoting/AgentsClient.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/remoting/AgentsClient.java
@@ -11,6 +11,7 @@ import com.hazelcast.simulator.test.Failure;
import com.hazelcast.simulator.test.TestPhase;
import com.hazelcast.simulator.test.TestSuite;
import com.hazelcast.simulator.utils.CommandLineExitException;
+import com.hazelcast.simulator.utils.EmptyStatement;
import com.hazelcast.simulator.worker.commands.Command;
import com.hazelcast.simulator.worker.commands.IsPhaseCompletedCommand;
import org.apache.log4j.Logger;
@@ -391,7 +392,9 @@ public class AgentsClient {
}
throw new RuntimeException(e);
} catch (InterruptedException e) {
- throw new RuntimeException(e);
+ LOGGER.info("Got interrupted while waiting for results from Agents!");
+ EmptyStatement.ignore(e);
+ break;
}
}
return resultList; | ['simulator/src/main/java/com/hazelcast/simulator/coordinator/remoting/AgentsClient.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 604,552 | 120,494 | 16,628 | 173 | 256 | 39 | 5 | 1 | 1,100 | 67 | 254 | 16 | 0 | 1 | 1970-01-01T00:23:56 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
348 | hazelcast/hazelcast-simulator/644/643 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/643 | https://github.com/hazelcast/hazelcast-simulator/pull/644 | https://github.com/hazelcast/hazelcast-simulator/pull/644 | 1 | fixes | IllegalThreadStateException for WorkerPerformanceMonitorThread | ```
Failure[
message='Worked ran into an unhandled exception'
type='Worker exception'
agentAddress=10.184.183.169
time=Tue May 19 15:52:34 UTC 2015
workerAddress=10.184.183.169:5702
workerId=worker-10.184.183.169-1-server
test=null unknown
cause=java.lang.IllegalThreadStateException
at java.lang.Thread.start(Thread.java:705)
at com.hazelcast.simulator.worker.WorkerPerformanceMonitor.start(WorkerPerformanceMonitor.java:36)
at com.hazelcast.simulator.worker.WorkerCommandRequestProcessor$WorkerCommandRequestProcessorThread.process(WorkerCommandRequestProcessor.java:195)
at com.hazelcast.simulator.worker.WorkerCommandRequestProcessor$WorkerCommandRequestProcessorThread.doProcess(WorkerCommandRequestProcessor.java:121)
at com.hazelcast.simulator.worker.WorkerCommandRequestProcessor$WorkerCommandRequestProcessorThread.run(WorkerCommandRequestProcessor.java:106)
]
```
why for this super complex start?
```
boolean start() {
if (!thread.running) {
synchronized (this) {
if (!thread.running) {
thread.running = true;
thread.start();
return true;
}
}
}
return false;
}
```
| 7e4e7522e008a12fb2374ba28389b719500b4dfa | 80dbe23bb577efeca7e6cb999f50a39022be532e | https://github.com/hazelcast/hazelcast-simulator/compare/7e4e7522e008a12fb2374ba28389b719500b4dfa...80dbe23bb577efeca7e6cb999f50a39022be532e | diff --git a/simulator/src/main/java/com/hazelcast/simulator/worker/WorkerPerformanceMonitor.java b/simulator/src/main/java/com/hazelcast/simulator/worker/WorkerPerformanceMonitor.java
index a25a19ad6..676ab5f9e 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/worker/WorkerPerformanceMonitor.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/worker/WorkerPerformanceMonitor.java
@@ -10,6 +10,7 @@ import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
import static com.hazelcast.simulator.utils.CommonUtils.fillString;
import static com.hazelcast.simulator.utils.CommonUtils.formatDouble;
@@ -22,33 +23,29 @@ import static java.lang.String.format;
*/
class WorkerPerformanceMonitor {
- private final WorkerPerformanceMonitorThread workerPerformanceMonitorThread;
+ private final MonitorThread thread;
+ private final AtomicBoolean started = new AtomicBoolean();
WorkerPerformanceMonitor(Collection<TestContainer<TestContext>> testContainers) {
- workerPerformanceMonitorThread = new WorkerPerformanceMonitorThread(testContainers);
+ thread = new MonitorThread(testContainers);
}
boolean start() {
- if (!workerPerformanceMonitorThread.running) {
- synchronized (this) {
- if (!workerPerformanceMonitorThread.running) {
- workerPerformanceMonitorThread.running = true;
- workerPerformanceMonitorThread.start();
-
- return true;
- }
- }
+ if (!started.compareAndSet(false, true)) {
+ return false;
}
- return false;
+
+ thread.start();
+ return true;
}
void stop() {
- workerPerformanceMonitorThread.running = false;
+ thread.stop = true;
}
- private static final class WorkerPerformanceMonitorThread extends Thread {
+ private static final class MonitorThread extends Thread {
- private static final Logger LOGGER = Logger.getLogger(WorkerPerformanceMonitorThread.class);
+ private static final Logger LOGGER = Logger.getLogger(MonitorThread.class);
private final File globalPerformanceFile = new File("performance.txt");
private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
@@ -58,9 +55,9 @@ class WorkerPerformanceMonitor {
private long globalLastOpsCount;
private long globalLastTimeMillis = System.currentTimeMillis();
- private volatile boolean running;
+ private volatile boolean stop;
- private WorkerPerformanceMonitorThread(Collection<TestContainer<TestContext>> testContainers) {
+ private MonitorThread(Collection<TestContainer<TestContext>> testContainers) {
super("WorkerPerformanceMonitorThread");
setDaemon(true);
@@ -71,7 +68,7 @@ class WorkerPerformanceMonitor {
@Override
public void run() {
- while (running) {
+ while (!stop) {
try {
Thread.sleep(5000);
writeStatsToFiles(); | ['simulator/src/main/java/com/hazelcast/simulator/worker/WorkerPerformanceMonitor.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 588,758 | 117,228 | 16,187 | 171 | 1,641 | 260 | 33 | 1 | 1,289 | 66 | 287 | 37 | 0 | 2 | 1970-01-01T00:23:52 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
349 | hazelcast/hazelcast-simulator/631/629 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/629 | https://github.com/hazelcast/hazelcast-simulator/pull/631 | https://github.com/hazelcast/hazelcast-simulator/pull/631 | 1 | fixes | NPE in Provisioner when Simulator was compiled without git metadata | When you e.g. download the Simulator sources as ZIP archive from GitHub there is no git repository/metadata available.
If you compile Simulator and use it, the `Provisioner` fails with a NPE, because it tries to print the git revision of Simulator, which is not available in that case.
The issue will be the underlaying `GitInfo` which should not fail in that case.
```
INFO 16:27:13 Hazelcast Simulator Provisioner
Exception in thread "main" java.lang.ExceptionInInitializerError
at com.hazelcast.simulator.provisioner.Provisioner.main(Provisioner.java:457)
Caused by: java.lang.NullPointerException
at java.util.Properties$LineReader.readLine(Properties.java:434)
at java.util.Properties.load0(Properties.java:353)
at java.util.Properties.load(Properties.java:341)
at com.hazelcast.simulator.common.GitInfo.loadGitProperties(GitInfo.java:56)
at com.hazelcast.simulator.common.GitInfo.<init>(GitInfo.java:28)
at com.hazelcast.simulator.common.GitInfo.<clinit>(GitInfo.java:23)
... 1 more
```
| 2cd2b5f6ec58a86e6287b392c74fa48435067f3e | 6dde8d581d726021da748f499628911e2905abf8 | https://github.com/hazelcast/hazelcast-simulator/compare/2cd2b5f6ec58a86e6287b392c74fa48435067f3e...6dde8d581d726021da748f499628911e2905abf8 | diff --git a/simulator/src/main/java/com/hazelcast/simulator/common/GitInfo.java b/simulator/src/main/java/com/hazelcast/simulator/common/GitInfo.java
index 8fa9a3a90..783fd426b 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/common/GitInfo.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/common/GitInfo.java
@@ -9,14 +9,16 @@ import java.util.Properties;
import static com.hazelcast.simulator.utils.CommonUtils.closeQuietly;
public final class GitInfo {
- private static final String GIT_INFO_FILE = "simulator-git.properties";
- private static final String UNKNOWN = "Unknown";
- private static final String GIT_COMMIT_ID_AABREV = "git.commit.id.abbrev";
- private static final String GIT_COMMIT_ID = "git.commit.id";
- private static final String GIT_COMMIT_TIME = "git.commit.time";
- private static final String GIT_BUILD_TIME = "git.build.time";
- private static final String GIT_REMOTE_ORIGIN_URL = "git.remote.origin.url";
+ static final String GIT_INFO_FILE = "simulator-git.properties";
+
+ static final String UNKNOWN = "Unknown";
+
+ static final String GIT_COMMIT_ID_AABREV = "git.commit.id.abbrev";
+ static final String GIT_COMMIT_ID = "git.commit.id";
+ static final String GIT_COMMIT_TIME = "git.commit.time";
+ static final String GIT_BUILD_TIME = "git.build.time";
+ static final String GIT_REMOTE_ORIGIN_URL = "git.remote.origin.url";
private static final Logger LOGGER = Logger.getLogger(GitInfo.class);
@@ -25,7 +27,7 @@ public final class GitInfo {
private final Properties properties;
private GitInfo() {
- properties = loadGitProperties();
+ properties = loadGitProperties(GIT_INFO_FILE);
}
public static String getCommitIdAbbrev() {
@@ -48,22 +50,26 @@ public final class GitInfo {
return INSTANCE.properties.getProperty(GIT_REMOTE_ORIGIN_URL, UNKNOWN);
}
- private Properties loadGitProperties() {
- Properties properties = new Properties();
+ static Properties loadGitProperties(String fileName) {
InputStream gitPropsStream = null;
try {
- gitPropsStream = getClass().getClassLoader().getResourceAsStream(GIT_INFO_FILE);
+ gitPropsStream = GitInfo.class.getClassLoader().getResourceAsStream(fileName);
+ if (gitPropsStream == null) {
+ return new DummyProperties();
+ }
+ Properties properties = new Properties();
properties.load(gitPropsStream);
+ return properties;
} catch (IOException e) {
- LOGGER.warn("Error while loading Git properties.", e);
- properties = new DummyProperties();
+ LOGGER.warn("Error while loading Git properties from " + fileName, e);
+ return new DummyProperties();
} finally {
closeQuietly(gitPropsStream);
}
- return properties;
}
- private static class DummyProperties extends Properties {
+ static class DummyProperties extends Properties {
+
@Override
public String getProperty(String key) {
return UNKNOWN;
diff --git a/simulator/src/test/java/com/hazelcast/simulator/common/GitInfoTest.java b/simulator/src/test/java/com/hazelcast/simulator/common/GitInfoTest.java
index 7754d27e3..a4e552683 100644
--- a/simulator/src/test/java/com/hazelcast/simulator/common/GitInfoTest.java
+++ b/simulator/src/test/java/com/hazelcast/simulator/common/GitInfoTest.java
@@ -2,7 +2,12 @@ package com.hazelcast.simulator.common;
import org.junit.Test;
+import java.util.Properties;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
public class GitInfoTest {
@@ -30,4 +35,31 @@ public class GitInfoTest {
public void testGetRemoteOriginUrl() throws Exception {
assertNotNull(GitInfo.getRemoteOriginUrl());
}
-}
\\ No newline at end of file
+
+ @Test
+ public void testLoadProperties() {
+ Properties properties = GitInfo.loadGitProperties(GitInfo.GIT_INFO_FILE);
+ assertNotNull(properties);
+ assertFalse(properties instanceof GitInfo.DummyProperties);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testLoadProperties_null() {
+ GitInfo.loadGitProperties(null);
+ }
+
+ @Test
+ public void testLoadProperties_notExists() {
+ Properties properties = GitInfo.loadGitProperties("notExists");
+ assertNotNull(properties);
+ assertTrue(properties instanceof GitInfo.DummyProperties);
+
+ assertEquals(GitInfo.UNKNOWN, properties.getProperty(GitInfo.GIT_COMMIT_ID_AABREV));
+ assertEquals(GitInfo.UNKNOWN, properties.getProperty(GitInfo.GIT_COMMIT_ID));
+ assertEquals(GitInfo.UNKNOWN, properties.getProperty(GitInfo.GIT_COMMIT_TIME));
+ assertEquals(GitInfo.UNKNOWN, properties.getProperty(GitInfo.GIT_BUILD_TIME));
+ assertEquals(GitInfo.UNKNOWN, properties.getProperty(GitInfo.GIT_REMOTE_ORIGIN_URL));
+
+ assertEquals("default", properties.getProperty(GitInfo.GIT_COMMIT_ID_AABREV, "default"));
+ }
+} | ['simulator/src/main/java/com/hazelcast/simulator/common/GitInfo.java', 'simulator/src/test/java/com/hazelcast/simulator/common/GitInfoTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 582,238 | 115,719 | 15,988 | 166 | 1,967 | 365 | 36 | 1 | 1,029 | 94 | 253 | 20 | 0 | 1 | 1970-01-01T00:23:51 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
351 | hazelcast/hazelcast-simulator/321/320 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/320 | https://github.com/hazelcast/hazelcast-simulator/pull/321 | https://github.com/hazelcast/hazelcast-simulator/pull/321 | 1 | fixes | Performance measuring problem | ```
INFO 08:29:24 Operation-count: -3
INFO 08:29:24 Performance: -0.05 ops/s
```
One test failed and I got this output. How can operation count be negative.
| 1515bb7106e12486ca7183adac469b11082bc112 | 015fbde9d9b4da0cd11a315b721618ef1658a277 | https://github.com/hazelcast/hazelcast-simulator/compare/1515bb7106e12486ca7183adac469b11082bc112...015fbde9d9b4da0cd11a315b721618ef1658a277 | diff --git a/stabilizer/src/main/java/com/hazelcast/stabilizer/coordinator/TestCaseRunner.java b/stabilizer/src/main/java/com/hazelcast/stabilizer/coordinator/TestCaseRunner.java
index a36cb50ce..24d6edae5 100644
--- a/stabilizer/src/main/java/com/hazelcast/stabilizer/coordinator/TestCaseRunner.java
+++ b/stabilizer/src/main/java/com/hazelcast/stabilizer/coordinator/TestCaseRunner.java
@@ -140,15 +140,21 @@ public class TestCaseRunner {
for (Map.Entry<String, R> entry : combinedResults.entrySet()) {
String probeName = entry.getKey();
R result = entry.getValue();
- echo("Probe " + probeName + " result: "+result.toHumanString());
+ echo("Probe " + probeName + " result: " + result.toHumanString());
}
}
private void logPerformance() {
if (coordinator.monitorPerformance) {
- log.info("Operation-count: " + performanceFormat.format(coordinator.operationCount));
- double performance = (coordinator.operationCount * 1.0d) / testSuite.duration;
- log.info("Performance: " + performanceFormat.format(performance) + " ops/s");
+ long operationCount = coordinator.operationCount;
+ if (operationCount < 0) {
+ log.info("Operation-count: not available");
+ log.info("Performance: not available");
+ } else {
+ log.info("Operation-count: " + performanceFormat.format(operationCount));
+ double performance = (operationCount * 1.0d) / testSuite.duration;
+ log.info("Performance: " + performanceFormat.format(performance) + " ops/s");
+ }
}
}
@@ -176,7 +182,11 @@ public class TestCaseRunner {
String msg = format("Running %s, %-4.2f percent complete", secondsToHuman(elapsed), percentage);
if (coordinator.monitorPerformance) {
- msg += ", " + performanceFormat.format(coordinator.performance) + " ops/s.";
+ if (coordinator.operationCount < 0) {
+ msg += ", performance not available";
+ } else {
+ msg += ", " + performanceFormat.format(coordinator.performance) + " ops/s.";
+ }
}
log.info(prefix + msg); | ['stabilizer/src/main/java/com/hazelcast/stabilizer/coordinator/TestCaseRunner.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 318,623 | 63,100 | 9,027 | 94 | 1,318 | 254 | 20 | 1 | 160 | 25 | 51 | 7 | 0 | 1 | 1970-01-01T00:23:31 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
352 | hazelcast/hazelcast-simulator/313/312 | hazelcast | hazelcast-simulator | https://github.com/hazelcast/hazelcast-simulator/issues/312 | https://github.com/hazelcast/hazelcast-simulator/pull/313 | https://github.com/hazelcast/hazelcast-simulator/pull/313 | 1 | fixes | Failed to execute expir.run(), and Stabilizer Reports No exceptions, run as Normal | The logs show
```
/worker-10.233.78.201-2-client/worker.log
INFO 2014-09-15 14:38:07,554 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Initializing test expir ---------------------------
id=expir
INFO 2014-09-15 14:38:08,720 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.setup() ------------------------------------
INFO 2014-09-15 14:38:08,813 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.setup() ---------------------------
INFO 2014-09-15 14:38:10,731 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.localWarmup() ------------------------------------
INFO 2014-09-15 14:38:10,731 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.localWarmup() ---------------------------
INFO 2014-09-15 14:38:13,745 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.run() ------------------------------------
FATAL 2014-09-15 14:38:13,782 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Failed to execute expir.run() ------------------------------------
INFO 2014-09-15 14:40:14,915 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- expir.stop() ------------------------------------
INFO 2014-09-15 14:40:24,955 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.localVerify() ------------------------------------
INFO 2014-09-15 14:40:24,955 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.localVerify() ---------------------------
INFO 2014-09-15 14:40:28,971 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.localTeardown() ------------------------------------
INFO 2014-09-15 14:40:28,971 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.localTeardown() ---------------------------
```
```
./worker-10.233.78.201-1-server/worker.log
INFO 2014-09-15 14:38:06,938 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Initializing test expir ---------------------------
id=expir
INFO 2014-09-15 14:38:08,079 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.setup() ------------------------------------
INFO 2014-09-15 14:38:08,097 [Thread-2] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.setup() ---------------------------
INFO 2014-09-15 14:38:10,090 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.localWarmup() ------------------------------------
INFO 2014-09-15 14:38:10,090 [Thread-3] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.localWarmup() ---------------------------
INFO 2014-09-15 14:38:12,098 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.globalWarmup() ------------------------------------
INFO 2014-09-15 14:38:12,125 [Thread-4] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.globalWarmup() ---------------------------
INFO 2014-09-15 14:38:14,108 [Thread-5] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Skipping expir.run(); member is passive ------------------------------------
INFO 2014-09-15 14:40:14,281 [Thread-0] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- expir.stop() ------------------------------------
INFO 2014-09-15 14:40:17,300 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.globalVerify() ------------------------------------
INFO 2014-09-15 14:40:19,312 [Thread-6] com.hazelcast.stabilizer.tests.icache.ExpiryTest: expir: Counter{putExpiry=0, putAsyncExpiry=0, getExpiry=0, getAsyncExpiry=0} from 0 worker Threads
INFO 2014-09-15 14:40:19,890 [Thread-6] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.globalVerify() ---------------------------
INFO 2014-09-15 14:40:24,323 [Thread-7] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.localVerify() ------------------------------------
INFO 2014-09-15 14:40:24,323 [Thread-7] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.localVerify() ---------------------------
INFO 2014-09-15 14:40:26,331 [Thread-8] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.globalTeardown() ------------------------------------
INFO 2014-09-15 14:40:26,331 [Thread-8] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.globalTeardown() ---------------------------
INFO 2014-09-15 14:40:29,363 [Thread-9] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Starting expir.localTeardown() ------------------------------------
INFO 2014-09-15 14:40:29,363 [Thread-9] com.hazelcast.stabilizer.worker.MemberWorker: --------------------------- Finished expir.localTeardown() ---------------------------
```
```
INFO 2014-09-15 14:40:19,312 [Thread-6] com.hazelcast.stabilizer.tests.icache.ExpiryTest: expir: Counter{putExpiry=0, putAsyncExpiry=0, getExpiry=0, getAsyncExpiry=0} from 0 worker Threads
```
While Stabilizer shows
```
INFO 14:38:06 Running time per test: 00d 00h 02m 00s
INFO 14:38:06 Expected total testsuite time: 00d 00h 02m 00s
INFO 14:38:06 Running 1 tests parallel
INFO 14:38:06 --------------------------------------------------------------
Running Test : expir
TestCase{
id=expir
, class=com.hazelcast.stabilizer.tests.icache.ExpiryTest
}
--------------------------------------------------------------
INFO 14:38:06 expir expir Starting Test initialization
INFO 14:38:07 expir Completed Test initialization
INFO 14:38:07 expir Starting Test setup
INFO 14:38:09 expir Completed Test setup
INFO 14:38:09 expir Starting Test local warmup
INFO 14:38:11 expir Completed Test local warmup
INFO 14:38:11 expir Starting Test global warmup
INFO 14:38:13 expir Completed Test global warmup
INFO 14:38:13 expir Starting Test start
INFO 14:38:14 expir Completed Test start
INFO 14:38:14 expir Test will run for 00d 00h 02m 00s
INFO 14:38:44 expir Running 00d 00h 00m 30s, 25.00 percent complete
INFO 14:39:14 expir Running 00d 00h 01m 00s, 50.00 percent complete
INFO 14:39:44 expir Running 00d 00h 01m 30s, 75.00 percent complete
INFO 14:40:14 expir Running 00d 00h 02m 00s, 100.00 percent complete
INFO 14:40:14 expir Test finished running
INFO 14:40:14 expir Starting Test stop
INFO 14:40:16 expir Completed Test stop
INFO 14:40:17 expir Starting Test global verify
INFO 14:40:18 expir Waiting for globalVerify completion: 00d 00h 00m 01s
INFO 14:40:24 expir Completed Test global verify
INFO 14:40:24 expir Starting Test local verify
INFO 14:40:26 expir Completed Test local verify
INFO 14:40:26 expir Starting Test global tear down
INFO 14:40:27 expir Finished Test global tear down
INFO 14:40:27 expir Starting Test local tear down
INFO 14:40:29 expir Completed Test local tear down
INFO 14:40:29 Terminating workers
INFO 14:40:29 All workers have been terminated
INFO 14:40:29 Starting cool down (10 sec)
INFO 14:40:39 Finished cool down
INFO 14:40:39 Total running time: 152 seconds
INFO 14:40:39 -----------------------------------------------------------------------------
INFO 14:40:39 No failures have been detected!
INFO 14:40:39 -----------------------------------------------------------------------------
```
| 8ec51f52c5edb39e708098948540323abf50d4a1 | d6e5ec6245617e6dde4620fe0e170bcc29480a9d | https://github.com/hazelcast/hazelcast-simulator/compare/8ec51f52c5edb39e708098948540323abf50d4a1...d6e5ec6245617e6dde4620fe0e170bcc29480a9d | diff --git a/stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java b/stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java
index fd833ea0a..47117e9e8 100644
--- a/stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java
+++ b/stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java
@@ -379,11 +379,12 @@ public class MemberWorker {
log.info(format("--------------------------- Completed %s.run() " +
"------------------------------------",
testName));
- } catch (Throwable t) {
+ } catch (InvocationTargetException e) {
String msg = format("--------------------------- Failed to execute %s.run() " +
"------------------------------------",
testName);
- log.severe(msg, t);
+ log.severe(msg, e.getCause());
+ throw e.getCause();
}
}
} | ['stabilizer/src/main/java/com/hazelcast/stabilizer/worker/MemberWorker.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 318,827 | 63,195 | 9,025 | 94 | 291 | 41 | 5 | 1 | 7,906 | 603 | 2,175 | 96 | 0 | 4 | 1970-01-01T00:23:30 | 82 | Java | {'Java': 2059436, 'Python': 155276, 'HCL': 41145, 'Shell': 18850, 'FreeMarker': 5802} | Apache License 2.0 |
3,550 | osgi/osgi/393/392 | osgi | osgi | https://github.com/osgi/osgi/issues/392 | https://github.com/osgi/osgi/pull/393 | https://github.com/osgi/osgi/pull/393 | 1 | fixes | Converter's DTOUtil is overly reliant on exceptions for normal execution flow | DTOUtil.isDTOType() [1] relies on exceptions to find out whether a class has a public constructor and to find out if it has any methods - as a DTO should have a public no-args constructor and no methods beyond the ones defined by Object.class.
This works but is wasteful and slow. We should implement a different strategy to identify a DTO class as such.
[1] https://github.com/osgi/osgi/blob/main/org.osgi.util.converter/src/org/osgi/util/converter/DTOUtil.java | 5ef5da1c414217f7b55904b114a1fe5399000d42 | 002c3a9bf4b34709449b336def007b2acf820195 | https://github.com/osgi/osgi/compare/5ef5da1c414217f7b55904b114a1fe5399000d42...002c3a9bf4b34709449b336def007b2acf820195 | diff --git a/org.osgi.util.converter/src/org/osgi/util/converter/DTOUtil.java b/org.osgi.util.converter/src/org/osgi/util/converter/DTOUtil.java
index 08023923fa..8f4df68dfe 100644
--- a/org.osgi.util.converter/src/org/osgi/util/converter/DTOUtil.java
+++ b/org.osgi.util.converter/src/org/osgi/util/converter/DTOUtil.java
@@ -18,45 +18,43 @@
package org.osgi.util.converter;
+import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
+import java.util.Arrays;
/**
* @author $Id$
*/
class DTOUtil {
+ private static final Method[] OBJECT_CLASS_METHODS = Object.class
+ .getMethods();
+
private DTOUtil() {
// Do not instantiate. This is a utility class.
}
static boolean isDTOType(Class< ? > cls, boolean ignorePublicNoArgsCtor) {
if (!ignorePublicNoArgsCtor) {
- try {
- cls.getConstructor();
- } catch (NoSuchMethodException | SecurityException e) {
+ if (Arrays.stream(cls.getConstructors())
+ .noneMatch(ctor -> ctor.getParameterCount() == 0)) {
// No public zero-arg constructor, not a DTO
return false;
}
}
for (Method m : cls.getMethods()) {
- try {
- Object.class.getMethod(m.getName(), m.getParameterTypes());
- } catch (NoSuchMethodException snme) {
+ if (Arrays.stream(OBJECT_CLASS_METHODS)
+ .noneMatch(om -> om.getName().equals(m.getName())
+ && Arrays.equals(om.getParameterTypes(),
+ m.getParameterTypes()))) {
// Not a method defined by Object.class (or override of such
// method)
return false;
}
}
- /*
- * for (Field f : cls.getDeclaredFields()) { int modifiers =
- * f.getModifiers(); if (Modifier.isStatic(modifiers)) { // ignore
- * static fields continue; } if (!Modifier.isPublic(modifiers)) { return
- * false; } }
- */
-
boolean foundField = false;
for (Field f : cls.getFields()) {
int modifiers = f.getModifiers(); | ['org.osgi.util.converter/src/org/osgi/util/converter/DTOUtil.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,354,242 | 1,484,750 | 190,548 | 1,391 | 900 | 208 | 24 | 1 | 469 | 65 | 106 | 5 | 1 | 0 | 1970-01-01T00:27:20 | 80 | Java | {'Java': 17339591, 'XSLT': 8082406, 'HTML': 1425271, 'JavaScript': 684425, 'CSS': 63392, 'Shell': 48677, 'Batchfile': 11308, 'C': 11077, 'Ruby': 10888, 'Python': 9377, 'Makefile': 8343, 'Perl': 7202, 'SystemVerilog': 5465, 'NewLisp': 4473, 'Roff': 64} | Apache License 2.0 |
367 | opencb/cellbase/30/22 | opencb | cellbase | https://github.com/opencb/cellbase/issues/22 | https://github.com/opencb/cellbase/pull/30 | https://github.com/opencb/cellbase/pull/30 | 1 | fix | Population frequencies web services must return all frequencies | When querying population frequencies like:
http://wwwdev.ebi.ac.uk/cellbase/webservices/rest/v3/hsapiens/genomic/region/3:1166675-1166675/snp
Only frequencies different from '1' are returned. MongoDB has to contain only those.
| e822567075b2bb5bf7787e54d2047ae4065289e0 | 25df6dfdb03081eb25d519744841bed0c55976f3 | https://github.com/opencb/cellbase/compare/e822567075b2bb5bf7787e54d2047ae4065289e0...25df6dfdb03081eb25d519744841bed0c55976f3 | diff --git a/cellbase-app/src/main/java/org/opencb/cellbase/app/transform/VariationParser.java b/cellbase-app/src/main/java/org/opencb/cellbase/app/transform/VariationParser.java
index 31276c943..03b9d0ad5 100644
--- a/cellbase-app/src/main/java/org/opencb/cellbase/app/transform/VariationParser.java
+++ b/cellbase-app/src/main/java/org/opencb/cellbase/app/transform/VariationParser.java
@@ -38,6 +38,16 @@ public class VariationParser extends CellBaseParser {
private static final int TRANSCRIPT_VARIATION_FILE_ID = 1;
private static final int VARIATION_SYNONYM_FILE_ID = 2;
+ private static final String THOUSAND_GENOMES_STUDY = "1000GENOMES";
+ private static final String ESP_6500_STUDY = "ESP_6500";
+ private static final String THOUSAND_GENOMES_ALL_POPULATION = "phase_1_ALL";
+ private static final String THOUSAND_GENOMES_AMERICAN_POPULATION = "phase_1_AMR";
+ private static final String THOUSAND_GENOMES_ASIAN_POPULATION = "phase_1_ASN";
+ private static final String THOUSAND_GENOMES_AFRICAN_POPULATION = "phase_1_AFR";
+ private static final String THOUSAND_GENOMES_EUROPEAN_POPULATION = "phase_1_EUR";
+ private static final String ESP_EUROPEAN_AMERICAN_POPULATION = "European_American";
+ private static final String ESP_AFRICAN_AMERICAN_POPULATION = "African_American";
+
private Path variationDirectoryPath;
private BufferedReader variationSynonymsFileReader;
@@ -61,11 +71,13 @@ public class VariationParser extends CellBaseParser {
private static final String REFERENCE_FREQUENCY_GROUP = "ref";
private static final String ALTERNATE_FREQUENCY_GROUP = "alt";
private TabixReader frequenciesTabixReader;
+ private final Set<String> thousandGenomesMissedPopulations;
public VariationParser(Path variationDirectoryPath, CellBaseSerializer serializer) {
super(serializer);
this.variationDirectoryPath = variationDirectoryPath;
populationFrequnciesPattern = Pattern.compile("(?<" + POPULATION_ID_GROUP + ">\\\\w+):(?<" + REFERENCE_FREQUENCY_GROUP + ">\\\\d+.\\\\d+),(?<" + ALTERNATE_FREQUENCY_GROUP + ">\\\\d+.\\\\d+)");
+ thousandGenomesMissedPopulations = new HashSet<>();
}
@Override
@@ -463,6 +475,7 @@ public class VariationParser extends CellBaseParser {
for (String populationFrequency : variationFrequenciesString.split(";")) {
frequencies.add(parsePopulationFrequency(populationFrequency, referenceAllele, alternativeAllele));
}
+ frequencies = add1000GenomesMissedPopulations(frequencies);
return frequencies;
}
@@ -472,28 +485,36 @@ public class VariationParser extends CellBaseParser {
if (m.matches()) {
String populationName;
+ String study = "";
String population = m.group(POPULATION_ID_GROUP);
switch (population) {
case "1000G_AF":
- populationName = "1000GENOMES:phase_1_ALL";
+ study = THOUSAND_GENOMES_STUDY;
+ populationName = THOUSAND_GENOMES_ALL_POPULATION;
break;
case "1000G_AMR_AF":
- populationName = "1000GENOMES:phase_1_AMR";
+ study = THOUSAND_GENOMES_STUDY;
+ populationName = THOUSAND_GENOMES_AMERICAN_POPULATION;
break;
case "1000G_ASN_AF":
- populationName = "1000GENOMES:phase_1_ASN";
+ study = THOUSAND_GENOMES_STUDY;
+ populationName = THOUSAND_GENOMES_ASIAN_POPULATION;
break;
case "1000G_AFR_AF":
- populationName = "1000GENOMES:phase_1_AFR";
+ study = THOUSAND_GENOMES_STUDY;
+ populationName = THOUSAND_GENOMES_AFRICAN_POPULATION;
break;
case "1000G_EUR_AF":
- populationName = "1000GENOMES:phase_1_EUR";
+ study = THOUSAND_GENOMES_STUDY;
+ populationName = THOUSAND_GENOMES_EUROPEAN_POPULATION;
break;
case "ESP_EA_AF":
- populationName = "ESP6500:European_American";
+ study = ESP_6500_STUDY;
+ populationName = ESP_EUROPEAN_AMERICAN_POPULATION;
break;
case "ESP_AA_AF":
- populationName = "ESP6500:African_American";
+ study = ESP_6500_STUDY;
+ populationName = ESP_AFRICAN_AMERICAN_POPULATION;
break;
default:
populationName = population;
@@ -501,12 +522,41 @@ public class VariationParser extends CellBaseParser {
Float referenceFrequency = Float.parseFloat(m.group(REFERENCE_FREQUENCY_GROUP));
Float alternativeFrequency = Float.parseFloat(m.group(ALTERNATE_FREQUENCY_GROUP));
- populationFrequency = new PopulationFrequency(populationName, referenceAllele, alternativeAllele, referenceFrequency, alternativeFrequency);
+ populationFrequency = new PopulationFrequency(study, populationName, populationName, referenceAllele, alternativeAllele, referenceFrequency, alternativeFrequency);
}
return populationFrequency;
}
+ private List<PopulationFrequency> add1000GenomesMissedPopulations(List<PopulationFrequency> frequencies) {
+ thousandGenomesMissedPopulations.add(THOUSAND_GENOMES_AFRICAN_POPULATION);
+ thousandGenomesMissedPopulations.add(THOUSAND_GENOMES_AMERICAN_POPULATION);
+ thousandGenomesMissedPopulations.add(THOUSAND_GENOMES_EUROPEAN_POPULATION);
+ thousandGenomesMissedPopulations.add(THOUSAND_GENOMES_ASIAN_POPULATION);
+ int thousandGenomesPopulationsNumber = thousandGenomesMissedPopulations.size();
+
+ String refAllele = null;
+ String altAllele = null;
+ for (PopulationFrequency frequency : frequencies) {
+ if (frequency.getStudy()!= null && frequency.getStudy().equals(THOUSAND_GENOMES_STUDY)) {
+ if (frequency.getPop().equals(THOUSAND_GENOMES_ALL_POPULATION)) {
+ refAllele = frequency.getRefAllele();
+ altAllele = frequency.getAltAllele();
+ }
+ thousandGenomesMissedPopulations.remove(frequency.getPop());
+ }
+ }
+
+ // if the variation has some 1000 genomes superpopulation frequency, but not all, add the missed superpopulations with 1 as ref allele proportion
+ if (!thousandGenomesMissedPopulations.isEmpty() && thousandGenomesMissedPopulations.size() != thousandGenomesPopulationsNumber) {
+ for (String population : thousandGenomesMissedPopulations) {
+ frequencies.add(new PopulationFrequency(THOUSAND_GENOMES_STUDY, population, population, refAllele, altAllele, 1, 0));
+ }
+ }
+
+ return frequencies;
+ }
+
private List<TranscriptVariation> getTranscriptVariations(int variationId, String variationFeatureId) throws IOException, SQLException {
// Note the ID used, TranscriptVariation references to VariationFeature no Variation !!!
List<TranscriptVariation> transcriptVariation = new ArrayList<>();
diff --git a/cellbase-core/src/main/java/org/opencb/cellbase/core/serializer/DefaultJsonSerializer.java b/cellbase-core/src/main/java/org/opencb/cellbase/core/serializer/DefaultJsonSerializer.java
index c99986641..2e20c93a1 100644
--- a/cellbase-core/src/main/java/org/opencb/cellbase/core/serializer/DefaultJsonSerializer.java
+++ b/cellbase-core/src/main/java/org/opencb/cellbase/core/serializer/DefaultJsonSerializer.java
@@ -28,7 +28,6 @@ import java.nio.file.Path;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
-import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
/**
@@ -116,7 +115,9 @@ public class DefaultJsonSerializer extends CellBaseSerializer {
public void serialize(Variation variation) {
try {
if(variationWriters.get(variation.getChromosome()) == null) {
- variationWriters.put(variation.getChromosome(), Files.newBufferedWriter(outdirPath.resolve("variation_chr" + variation.getChromosome() + ".json"), Charset.defaultCharset()));
+ Path outputFilePath = outdirPath.resolve("variation_chr" + variation.getChromosome() + ".json.gz");
+ BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(Files.newOutputStream(outputFilePath))));
+ variationWriters.put(variation.getChromosome(), bw);
}
variationWriters.get(variation.getChromosome()).write(jsonObjectWriter.writeValueAsString(variation));
variationWriters.get(variation.getChromosome()).newLine();
diff --git a/cellbase-mongodb/src/main/java/org/opencb/cellbase/lib/mongodb/serializer/MongoDBSerializer.java b/cellbase-mongodb/src/main/java/org/opencb/cellbase/lib/mongodb/serializer/MongoDBSerializer.java
index 938d04f11..d8fd3e3e1 100644
--- a/cellbase-mongodb/src/main/java/org/opencb/cellbase/lib/mongodb/serializer/MongoDBSerializer.java
+++ b/cellbase-mongodb/src/main/java/org/opencb/cellbase/lib/mongodb/serializer/MongoDBSerializer.java
@@ -11,10 +11,14 @@ import org.opencb.cellbase.lib.mongodb.serializer.converters.GeneConverter;
import org.opencb.cellbase.lib.mongodb.serializer.converters.VariantEffectConverter;
import org.opencb.cellbase.lib.mongodb.serializer.converters.VariationConverter;
+import java.io.BufferedWriter;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.util.zip.GZIPOutputStream;
/**
@@ -61,7 +65,8 @@ public class MongoDBSerializer extends DefaultJsonSerializer {
public void serialize(Variation variation) {
try {
if(variationWriters.get(variation.getChromosome()) == null) {
- variationWriters.put(variation.getChromosome(), Files.newBufferedWriter(outdirPath.resolve("variation_chr" + variation.getChromosome() + ".json"), Charset.defaultCharset()));
+ Path outputFilePath = outdirPath.resolve("variation_chr" + variation.getChromosome() + ".json.gz");
+ variationWriters.put(variation.getChromosome(), new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(outputFilePath.toFile())))));
}
DBObject mongoDbDchema = variationConverter.convertToStorageSchema(variation);
variationWriters.get(variation.getChromosome()).write(jsonObjectWriter.writeValueAsString(mongoDbDchema)); | ['cellbase-mongodb/src/main/java/org/opencb/cellbase/lib/mongodb/serializer/MongoDBSerializer.java', 'cellbase-app/src/main/java/org/opencb/cellbase/app/transform/VariationParser.java', 'cellbase-core/src/main/java/org/opencb/cellbase/core/serializer/DefaultJsonSerializer.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 1,205,029 | 264,964 | 32,801 | 209 | 5,477 | 1,212 | 78 | 3 | 228 | 19 | 61 | 5 | 1 | 0 | 1970-01-01T00:23:42 | 79 | Java | {'Java': 3693062, 'JavaScript': 2458859, 'Python': 173114, 'Perl': 107545, 'CSS': 92703, 'R': 50286, 'Jupyter Notebook': 15424, 'Shell': 12696, 'HTML': 10388, 'Dockerfile': 4743, 'Mustache': 3207, 'Smarty': 396} | Apache License 2.0 |
366 | opencb/cellbase/160/139 | opencb | cellbase | https://github.com/opencb/cellbase/issues/139 | https://github.com/opencb/cellbase/pull/160 | https://github.com/opencb/cellbase/pull/160 | 1 | closes | Typo in RegionWS "clinical" | In RegionWSServer we can see the param "phenotpe" (should be "phenotype"):
```
@GET
@Path("/{chrRegionId}/clinical")
public Response getClinicalByRegion(@PathParam("chrRegionId") String query,
@DefaultValue("") @QueryParam("gene") String gene,
@DefaultValue("") @QueryParam("id") String id,
@DefaultValue("") @QueryParam("phenotpe") String phenotpe) {
```
There could be more typos like these in the WS annotations. Should we extract the String literals into Constants?
| 618ab4a8da1ffa12071ea89a2865c24477110b4a | 95fb14a5b8ef0da60a130f24067266571e8a4780 | https://github.com/opencb/cellbase/compare/618ab4a8da1ffa12071ea89a2865c24477110b4a...95fb14a5b8ef0da60a130f24067266571e8a4780 | diff --git a/cellbase-server/src/main/java/org/opencb/cellbase/server/ws/genomic/RegionWSServer.java b/cellbase-server/src/main/java/org/opencb/cellbase/server/ws/genomic/RegionWSServer.java
index 5bf840a67..6d21c79e0 100755
--- a/cellbase-server/src/main/java/org/opencb/cellbase/server/ws/genomic/RegionWSServer.java
+++ b/cellbase-server/src/main/java/org/opencb/cellbase/server/ws/genomic/RegionWSServer.java
@@ -252,7 +252,7 @@ public class RegionWSServer extends GenericRestWSServer {
public Response getClinicalByRegion(@PathParam("chrRegionId") String query,
@DefaultValue("") @QueryParam("gene") String gene,
@DefaultValue("") @QueryParam("id") String id,
- @DefaultValue("") @QueryParam("phenotpe") String phenotpe) {
+ @DefaultValue("") @QueryParam("phenotype") String phenotype) {
try {
parseQueryParams();
ClinicalDBAdaptor clinicalDBAdaptor = dbAdaptorFactory.getClinicalDBAdaptor(this.species, this.assembly);
@@ -266,8 +266,8 @@ public class RegionWSServer extends GenericRestWSServer {
if(id != null && !id.equals("")) {
queryOptions.add("id", Arrays.asList(id.split(",")));
}
- if(phenotpe != null && !phenotpe.equals("")) {
- queryOptions.add("phenotpe", Arrays.asList(phenotpe.split(",")));
+ if(phenotype != null && !phenotype.equals("")) {
+ queryOptions.add("phenotype", Arrays.asList(phenotype.split(",")));
}
// List<QueryResult> clinicalQueryResultList = clinicalDBAdaptor.getAllClinvarByRegionList(regions, queryOptions);
// List<QueryResult> queryResultList = new ArrayList<>(); | ['cellbase-server/src/main/java/org/opencb/cellbase/server/ws/genomic/RegionWSServer.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,578,586 | 343,494 | 40,168 | 210 | 511 | 95 | 6 | 1 | 580 | 52 | 118 | 13 | 0 | 1 | 1970-01-01T00:24:02 | 79 | Java | {'Java': 3693062, 'JavaScript': 2458859, 'Python': 173114, 'Perl': 107545, 'CSS': 92703, 'R': 50286, 'Jupyter Notebook': 15424, 'Shell': 12696, 'HTML': 10388, 'Dockerfile': 4743, 'Mustache': 3207, 'Smarty': 396} | Apache License 2.0 |
365 | opencb/cellbase/161/158 | opencb | cellbase | https://github.com/opencb/cellbase/issues/158 | https://github.com/opencb/cellbase/pull/161 | https://github.com/opencb/cellbase/pull/161 | 1 | closes | Incorrect parameter description in 'build; command line help | The parameter data has this description:
Comma separated list of data to **download**: genome, gene, variation, regulation, protein, conservation, drug, clinvar, cosmic and GWAS CAatalog. 'all' build everything. [null]
'download' has to be replaced by 'build':
Comma separated list of data to **build**: genome, gene, variation, regulation, protein, conservation, drug, clinvar, cosmic and GWAS CAatalog. 'all' build everything. [null]
| 618ab4a8da1ffa12071ea89a2865c24477110b4a | 0e69744f5d7f47395decadb5ada91b5d89dd5bd7 | https://github.com/opencb/cellbase/compare/618ab4a8da1ffa12071ea89a2865c24477110b4a...0e69744f5d7f47395decadb5ada91b5d89dd5bd7 | diff --git a/cellbase-app/src/main/java/org/opencb/cellbase/app/cli/CliOptionsParser.java b/cellbase-app/src/main/java/org/opencb/cellbase/app/cli/CliOptionsParser.java
index 6e0fb33fa..ae72d14ed 100644
--- a/cellbase-app/src/main/java/org/opencb/cellbase/app/cli/CliOptionsParser.java
+++ b/cellbase-app/src/main/java/org/opencb/cellbase/app/cli/CliOptionsParser.java
@@ -144,7 +144,7 @@ public class CliOptionsParser {
public CommonCommandOptions commonOptions = commonCommandOptions;
- @Parameter(names = {"-d", "--data"}, description = "Comma separated list of data to download: genome, gene, variation, regulation, protein, conservation, drug, clinvar, cosmic and GWAS CAatalog. 'all' build everything.", required = true, arity = 1)
+ @Parameter(names = {"-d", "--data"}, description = "Comma separated list of data to build: genome, gene, variation, regulation, protein, conservation, drug, clinvar, cosmic and GWAS CAatalog. 'all' build everything.", required = true, arity = 1)
public String data;
@Parameter(names = {"-s", "--species"}, description = "Name of the species to be built, valid format include 'Homo sapiens' or 'hsapiens'", required = false, arity = 1) | ['cellbase-app/src/main/java/org/opencb/cellbase/app/cli/CliOptionsParser.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,578,586 | 343,494 | 40,168 | 210 | 512 | 126 | 2 | 1 | 438 | 59 | 104 | 7 | 0 | 0 | 1970-01-01T00:24:02 | 79 | Java | {'Java': 3693062, 'JavaScript': 2458859, 'Python': 173114, 'Perl': 107545, 'CSS': 92703, 'R': 50286, 'Jupyter Notebook': 15424, 'Shell': 12696, 'HTML': 10388, 'Dockerfile': 4743, 'Mustache': 3207, 'Smarty': 396} | Apache License 2.0 |
1,389 | checkmarx-ltd/cx-flow/850/628 | checkmarx-ltd | cx-flow | https://github.com/checkmarx-ltd/cx-flow/issues/628 | https://github.com/checkmarx-ltd/cx-flow/pull/850 | https://github.com/checkmarx-ltd/cx-flow/pull/850 | 1 | fixes | CxFlow creates duplicate tickets/issues when ran in batch mode. | ### Description
> The original problem is that cxflow is processing the scan results twice if we run it in a batch mode (using --project parameter).
This is evident from the log as well as if there is any bug tracker configured, there are duplicate tickets getting created.
### Expected Behavior
CxFlow should process the latest scan results only once.
### Actual Behavior
Scan Results getting processed twice.
### Reproduction
1) Run cxflow in cmd mode using the --project paramter.
For example : java -jar cxflow.jar --project --cx-team="\\Demo\\CxFlow\\" --cx-project="AnySASTProjectName" --bug-tracker=AnyBugTracker(Sarif or Jira)
--app="AnyAppName"
2) Check the log output and repeated result processing or issue creating can be observed.
**More Details** :
1) The issue seems to be occurring as a result of the method 'processResults' (From ResultsService) gets called on two different occasions.
2) Refer to the method 'publishLatestScanResults' from CxFlowRunner.java, add the breakpoints, and observe the behavior.
### Environment Details
> CxFlow version : 1.6.17
| 4e11bda17c663a0c8bf71b7b1ff8f364ebfa4fd5 | 077b901c17975e8d0b0658703b7c7ce1e5f44d6d | https://github.com/checkmarx-ltd/cx-flow/compare/4e11bda17c663a0c8bf71b7b1ff8f364ebfa4fd5...077b901c17975e8d0b0658703b7c7ce1e5f44d6d | diff --git a/src/main/java/com/checkmarx/flow/CxFlowRunner.java b/src/main/java/com/checkmarx/flow/CxFlowRunner.java
index ab6f935f..582cb5d8 100644
--- a/src/main/java/com/checkmarx/flow/CxFlowRunner.java
+++ b/src/main/java/com/checkmarx/flow/CxFlowRunner.java
@@ -584,8 +584,7 @@ public class CxFlowRunner implements ApplicationRunner {
}
private void publishLatestScanResults(ScanRequest request) throws ExitThrowable {
- ScanResults scanResults = runOnActiveScanners(scanner -> scanner.getLatestScanResults(request));
- processResults(request, scanResults);
+ runOnActiveScanners(scanner -> scanner.getLatestScanResults(request));
}
private void processResults(ScanRequest request, ScanResults results) throws ExitThrowable { | ['src/main/java/com/checkmarx/flow/CxFlowRunner.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,619,716 | 326,691 | 51,534 | 404 | 232 | 43 | 3 | 1 | 1,117 | 156 | 249 | 32 | 0 | 0 | 1970-01-01T00:27:14 | 78 | Java | {'Java': 3089142, 'Gherkin': 137222, 'HTML': 53644, 'Groovy': 4121, 'Dockerfile': 2287, 'Mustache': 1528} | Apache License 2.0 |
244 | xenit-eu/dynamic-extensions-for-alfresco/144/143 | xenit-eu | dynamic-extensions-for-alfresco | https://github.com/xenit-eu/dynamic-extensions-for-alfresco/issues/143 | https://github.com/xenit-eu/dynamic-extensions-for-alfresco/pull/144 | https://github.com/xenit-eu/dynamic-extensions-for-alfresco/pull/144 | 1 | fix | WebScriptUtil.extractHttpServletResponse() not working. | The extractHttpServletResponse method of the WebScriptUtil falsy returns null.
Cause:
AnnotationWebscriptResponse
```
@Override
public WebScriptResponse getNext() {
if (response instanceof WrappingWebScriptResponse) {
return ((WrappingWebScriptResponse) response).getNext();
} else {
return null;
}
}
``` | cde4af44900f0ba860fa637b9669f0adefbd181d | 9e98420149ace0744e73e1478d112ae909aa3f53 | https://github.com/xenit-eu/dynamic-extensions-for-alfresco/compare/cde4af44900f0ba860fa637b9669f0adefbd181d...9e98420149ace0744e73e1478d112ae909aa3f53 | diff --git a/webscripts/src/main/java/com/github/dynamicextensionsalfresco/webscripts/AnnotationWebscriptResponse.java b/webscripts/src/main/java/com/github/dynamicextensionsalfresco/webscripts/AnnotationWebscriptResponse.java
index ba7dff76..8eefaa73 100644
--- a/webscripts/src/main/java/com/github/dynamicextensionsalfresco/webscripts/AnnotationWebscriptResponse.java
+++ b/webscripts/src/main/java/com/github/dynamicextensionsalfresco/webscripts/AnnotationWebscriptResponse.java
@@ -41,7 +41,7 @@ public class AnnotationWebscriptResponse implements WrappingWebScriptResponse {
if (response instanceof WrappingWebScriptResponse) {
return ((WrappingWebScriptResponse) response).getNext();
} else {
- return null;
+ return response;
}
}
| ['webscripts/src/main/java/com/github/dynamicextensionsalfresco/webscripts/AnnotationWebscriptResponse.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 406,173 | 80,664 | 12,696 | 202 | 37 | 8 | 2 | 1 | 334 | 33 | 76 | 14 | 0 | 1 | 1970-01-01T00:25:05 | 77 | Java | {'Java': 828783, 'FreeMarker': 22656, 'Groovy': 10200, 'JavaScript': 3167, 'CSS': 1733} | Apache License 2.0 |
281 | ibissource/iaf/5261/5252 | ibissource | iaf | https://github.com/ibissource/iaf/issues/5252 | https://github.com/ibissource/iaf/pull/5261 | https://github.com/ibissource/iaf/pull/5261 | 1 | closes | Accept value */*;text/html; causes Json2XmlValidator to return XML instead of JSON | **❗ please do not add sensitive information in issues, you can provide extra information via email using issue number as reference ❗**
**Describe the issue**
When having an ApiListener with `produces="JSON"` and using the Json2XmlInputValidator to handle data conversion, we would like to respond with JSON.
When a consumer specifies the following Accept header: `*/*;text/html` the consumes check succeeds due to `*/*` but the following code block fails here and will return `text/html` as outputFormat.
Causing errors when translating response that text/html is not a valid value, should be JSON or XML.
Introduced in #5127:
![image](https://github.com/ibissource/iaf/assets/37303445/24346004-cf33-401c-a130-f825ddac7e7d)
![image](https://github.com/ibissource/iaf/assets/37303445/33f81da3-326f-46a6-b870-db0e3f2b62db)
| 8fcf102ec7f18fdb8a10a56a4795b2d5660142d3 | 937c2ef33c2b05c934406a199c26454ad9fef32a | https://github.com/ibissource/iaf/compare/8fcf102ec7f18fdb8a10a56a4795b2d5660142d3...937c2ef33c2b05c934406a199c26454ad9fef32a | diff --git a/core/src/main/java/nl/nn/adapterframework/pipes/Json2XmlValidator.java b/core/src/main/java/nl/nn/adapterframework/pipes/Json2XmlValidator.java
index 70d668310..d800e076f 100644
--- a/core/src/main/java/nl/nn/adapterframework/pipes/Json2XmlValidator.java
+++ b/core/src/main/java/nl/nn/adapterframework/pipes/Json2XmlValidator.java
@@ -146,10 +146,10 @@ public class Json2XmlValidator extends XmlValidator implements HasPhysicalDestin
}
/**
- * Default format has precedence over the accept header, accept header may be invalid or * slash *, in which case it should be ignored. First accept value wins.
+ * Default format has precedence over the accept header, accept header may be invalid or * slash *, in case of * slash * it means any content-type is accepted for the consumer, defaults to detected output format.
*/
private String parseAcceptHeader(DocumentFormat detectedFormat, String acceptHeaderValue) {
- if(StringUtils.isEmpty(acceptHeaderValue) || "*/*".equals(acceptHeaderValue)) {
+ if(StringUtils.isEmpty(acceptHeaderValue) || acceptHeaderValue.contains("*/*")) {
return detectedFormat.name();
}
| ['core/src/main/java/nl/nn/adapterframework/pipes/Json2XmlValidator.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,990,285 | 1,600,466 | 196,456 | 1,188 | 545 | 110 | 4 | 1 | 841 | 97 | 221 | 15 | 2 | 0 | 1970-01-01T00:28:12 | 76 | Java | {'Java': 10343466, 'XSLT': 256796, 'JavaScript': 226501, 'HTML': 208226, 'CSS': 164473, 'SCSS': 79489, 'Less': 78481, 'Rich Text Format': 43789, 'Python': 14562, 'Batchfile': 8936, 'Dockerfile': 7406, 'PLSQL': 2825, 'TSQL': 2649, 'Shell': 1681, 'Roff': 1046, 'XQuery': 37} | Apache License 2.0 |
280 | ibissource/iaf/5190/5191 | ibissource | iaf | https://github.com/ibissource/iaf/issues/5191 | https://github.com/ibissource/iaf/pull/5190 | https://github.com/ibissource/iaf/pull/5190#issuecomment-1662309330 | 1 | closes | AWS S3 List does not work properly when > 1000 items in bucket | **❗ please do not add sensitive information in issues, you can provide extra information via email using issue number as reference ❗**
**Describe the issue**
The current implementation for the folderExists method in AWS S3 Filesystem is incorrect.
By calling `s3Client.listObjects(bucketName)` a truncated list of maximum 1000 items is retrieved, and the next 1000 are never retrieved.
![image](https://github.com/ibissource/iaf/assets/37303445/aaa701f3-99ca-42ed-adf2-56070c2de816)
Resulting in very silly folder not found errors:
![image](https://github.com/ibissource/iaf/assets/37303445/d9bd0ef1-09bc-4f98-8fee-b066562e606f)
Whilst these folders actually exist in s3 bucket, they are just not present in the first 1000 items :/ :/
Besides that, it is very in-efficient to iterate over an entire bucket to check if a folder exists.
It's better to call `listObjects` with a prefix to prevent retrieving a truncated list....
**Reporter**
Laurens
| 87b8d466b58a0ba923e3e0934f020ce8650914e3 | 4f2d4a0227c54e659c3ad0be97d289ee6308d3a5 | https://github.com/ibissource/iaf/compare/87b8d466b58a0ba923e3e0934f020ce8650914e3...4f2d4a0227c54e659c3ad0be97d289ee6308d3a5 | diff --git a/core/src/main/java/nl/nn/adapterframework/filesystem/AmazonS3FileSystem.java b/core/src/main/java/nl/nn/adapterframework/filesystem/AmazonS3FileSystem.java
index 05c0df1a7..c3e093794 100644
--- a/core/src/main/java/nl/nn/adapterframework/filesystem/AmazonS3FileSystem.java
+++ b/core/src/main/java/nl/nn/adapterframework/filesystem/AmazonS3FileSystem.java
@@ -348,7 +348,7 @@ public class AmazonS3FileSystem extends FileSystemBase<S3Object> implements IWri
@Override
public boolean folderExists(String folder) throws FileSystemException {
- ObjectListing objectListing = s3Client.listObjects(bucketName);
+ ObjectListing objectListing = s3Client.listObjects(bucketName, folder);
Iterator<S3ObjectSummary> objIter = objectListing.getObjectSummaries().iterator();
while (objIter.hasNext()) {
S3ObjectSummary s3ObjectSummary = objIter.next(); | ['core/src/main/java/nl/nn/adapterframework/filesystem/AmazonS3FileSystem.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,990,314 | 1,600,455 | 196,449 | 1,188 | 141 | 30 | 2 | 1 | 976 | 121 | 251 | 21 | 2 | 0 | 1970-01-01T00:28:10 | 76 | Java | {'Java': 10343466, 'XSLT': 256796, 'JavaScript': 226501, 'HTML': 208226, 'CSS': 164473, 'SCSS': 79489, 'Less': 78481, 'Rich Text Format': 43789, 'Python': 14562, 'Batchfile': 8936, 'Dockerfile': 7406, 'PLSQL': 2825, 'TSQL': 2649, 'Shell': 1681, 'Roff': 1046, 'XQuery': 37} | Apache License 2.0 |
9,873 | sirendii/advancedperipherals/452/444 | sirendii | advancedperipherals | https://github.com/SirEndii/AdvancedPeripherals/issues/444 | https://github.com/SirEndii/AdvancedPeripherals/pull/452 | https://github.com/SirEndii/AdvancedPeripherals/pull/452 | 2 | fixed | cardinal directions aren't working, contrary to the docs. | ### Describe
Mods: ONLY CC:Tweaked and Advanced Peripherals.
The inventory manager does not currently allow cardinal directions. (even though supposedly it should? according to the docs at least)
Please fix.
example script:
`manager = peripheral.find("inventoryManager")
manager.addItemToPlayer("UP", 50, -1)`
Do correct me if I am doing something wrong here, but yeah it appears to not work.
This also means that the example script on the docs won't work.
ALSO unrelated but the docs have the NBT version of the example in the non-nbt area and the non-nbt version in the nbt area.
### Steps to reproduce
1. get your CC setup. (put an inventory manager and such.)
2. Put a chest on top of the inventory manager,
3. put something in it
4. call `manager.addItemToPlayer("UP", 4, -1)`
5. It won't work because apparently the cardinal directions aren't enabled contrary to the docs.
### Multiplayer?
No
### Version
1.19.2-0.7.27r (Latest 1.19.2)
### Minecraft, Forge and maybe other related mods versions
Forge 43.2.8, Minecraft 1.19.2
### Screenshots or Videos
_No response_
### Crashlog/log
_No response_ | 2ce920b4e178d8dc88b1e4cefd0e02bce1d55e71 | 04b0d76a3d778239e5f4490fab00fc02c9df578e | https://github.com/sirendii/advancedperipherals/compare/2ce920b4e178d8dc88b1e4cefd0e02bce1d55e71...04b0d76a3d778239e5f4490fab00fc02c9df578e | diff --git a/src/main/java/de/srendi/advancedperipherals/common/util/CoordUtil.java b/src/main/java/de/srendi/advancedperipherals/common/util/CoordUtil.java
index 7b75ab26..a11c27a0 100644
--- a/src/main/java/de/srendi/advancedperipherals/common/util/CoordUtil.java
+++ b/src/main/java/de/srendi/advancedperipherals/common/util/CoordUtil.java
@@ -12,7 +12,7 @@ import net.minecraft.world.level.Level;
import net.minecraft.world.phys.AABB;
import org.jetbrains.annotations.NotNull;
-import java.util.Objects;
+import java.util.Locale;
public class CoordUtil {
@@ -49,32 +49,43 @@ public class CoordUtil {
}
public static Direction getDirection(FrontAndTop orientation, String computerSide) throws LuaException {
- if (Direction.byName(computerSide) != null) return Direction.byName(computerSide);
+ if (computerSide == null) {
+ throw new LuaException("null is not a valid side");
+ }
+
+ computerSide = computerSide.toLowerCase(Locale.ROOT);
+ Direction dir = Direction.byName(computerSide);
+ if (dir != null)
+ return dir;
Direction top = orientation.top();
Direction front = orientation.front();
+ final ComputerSide side = ComputerSide.valueOfInsensitive(computerSide);
+ if (side == null) {
+ throw new LuaException(computerSide + " is not a valid side");
+ }
+
if (front.getAxis() == Direction.Axis.Y) {
- if (front == Direction.UP) {
- if (Objects.equals(computerSide, ComputerSide.FRONT.toString())) return Direction.UP;
- if (Objects.equals(computerSide, ComputerSide.BACK.toString())) return Direction.DOWN;
+ switch (side) {
+ case FRONT: return front;
+ case BACK: return front.getOpposite();
+ case TOP: return top;
+ case BOTTOM: return top.getOpposite();
+ case RIGHT: return top.getClockWise();
+ case LEFT: return top.getCounterClockWise();
}
- if (front == Direction.DOWN) {
- if (Objects.equals(computerSide, ComputerSide.FRONT.toString())) return Direction.DOWN;
- if (Objects.equals(computerSide, ComputerSide.BACK.toString())) return Direction.UP;
+ } else {
+ switch (side) {
+ case FRONT: return front;
+ case BACK: return front.getOpposite();
+ case TOP: return Direction.UP;
+ case BOTTOM: return Direction.DOWN;
+ case RIGHT: return front.getCounterClockWise();
+ case LEFT: return front.getClockWise();
}
- if (Objects.equals(computerSide, ComputerSide.TOP.toString())) return top;
- if (Objects.equals(computerSide, ComputerSide.BOTTOM.toString())) return top.getOpposite();
- if (Objects.equals(computerSide, ComputerSide.RIGHT.toString())) return top.getClockWise();
- if (Objects.equals(computerSide, ComputerSide.LEFT.toString())) return top.getCounterClockWise();
}
- if (Objects.equals(computerSide, ComputerSide.FRONT.toString())) return front;
- if (Objects.equals(computerSide, ComputerSide.BACK.toString())) return front.getOpposite();
- if (Objects.equals(computerSide, ComputerSide.TOP.toString())) return Direction.UP;
- if (Objects.equals(computerSide, ComputerSide.BOTTOM.toString())) return Direction.DOWN;
- if (Objects.equals(computerSide, ComputerSide.RIGHT.toString())) return front.getCounterClockWise();
- if (Objects.equals(computerSide, ComputerSide.LEFT.toString())) return front.getClockWise();
- throw new LuaException(computerSide + " is not a valid side");
+ throw new LuaException(computerSide + " is not a expected side");
}
}
diff --git a/src/main/java/de/srendi/advancedperipherals/lib/peripherals/BasePeripheral.java b/src/main/java/de/srendi/advancedperipherals/lib/peripherals/BasePeripheral.java
index 81261a0b..73618696 100644
--- a/src/main/java/de/srendi/advancedperipherals/lib/peripherals/BasePeripheral.java
+++ b/src/main/java/de/srendi/advancedperipherals/lib/peripherals/BasePeripheral.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
-import java.util.Locale;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -132,9 +131,7 @@ public abstract class BasePeripheral<O extends IPeripheralOwner> implements IBas
}
protected Direction validateSide(String direction) throws LuaException {
- String dir = direction.toUpperCase(Locale.ROOT);
-
- return CoordUtil.getDirection(owner.getOrientation(), dir);
+ return CoordUtil.getDirection(owner.getOrientation(), direction);
}
@Override | ['src/main/java/de/srendi/advancedperipherals/common/util/CoordUtil.java', 'src/main/java/de/srendi/advancedperipherals/lib/peripherals/BasePeripheral.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 653,893 | 138,107 | 16,157 | 227 | 3,217 | 617 | 54 | 2 | 1,166 | 172 | 287 | 45 | 0 | 0 | 1970-01-01T00:28:02 | 74 | Java | {'Java': 681712, 'Lua': 2017} | Apache License 2.0 |
9,872 | sirendii/advancedperipherals/466/464 | sirendii | advancedperipherals | https://github.com/SirEndii/AdvancedPeripherals/issues/464 | https://github.com/SirEndii/AdvancedPeripherals/pull/466 | https://github.com/SirEndii/AdvancedPeripherals/pull/466 | 1 | resolve | Inventory manager causes items to stop stacking | ### Describe
I have a computer that is supplying me constantly with items. One of witch are apples. Then I noticed that apples in my inventory and apples in the me system don't stack anymore. I looked into the item data and the mod appends "tag:{}" to the data
### Steps to reproduce
1. have an apple in the inventory.
2. have a cc computer and the inventory manager attached and linked
3. put apple in main hand and run this: `/data get entity @s SelectedItem` . See that no `tag:{}` is there
4. call `manager.getItems()`
5. put apple in main hand and run this: `/data get entity @s SelectedItem` see that `tag:{}` appeared.
6. after the `tag:{}` got added see that the apple doesn't stack anymore with fresh items from creative inventory
### Multiplayer?
Yes
### Version
1.19.2-0.7.27r (Latest 1.19.2)
### Minecraft, Forge and maybe other related mods versions
Forge 43.2.0; Minecraft 1.19.2
### Screenshots or Videos
![image](https://github.com/SirEndii/AdvancedPeripherals/assets/67194495/9b0bf574-7336-41e0-bee7-c7a43ab1929f)
### Crashlog/log
_No response_ | ea263532fd4a7b327d116a3ae7f40a2c14c3ffaa | b37786303705d3be741f6291da903ae57132f2d5 | https://github.com/sirendii/advancedperipherals/compare/ea263532fd4a7b327d116a3ae7f40a2c14c3ffaa...b37786303705d3be741f6291da903ae57132f2d5 | diff --git a/src/main/java/de/srendi/advancedperipherals/common/util/LuaConverter.java b/src/main/java/de/srendi/advancedperipherals/common/util/LuaConverter.java
index cc0e81e4..2b12f1d9 100644
--- a/src/main/java/de/srendi/advancedperipherals/common/util/LuaConverter.java
+++ b/src/main/java/de/srendi/advancedperipherals/common/util/LuaConverter.java
@@ -69,7 +69,7 @@ public class LuaConverter {
public static Map<String, Object> stackToObject(@NotNull ItemStack stack) {
if (stack.isEmpty()) return new HashMap<>();
Map<String, Object> map = itemToObject(stack.getItem());
- CompoundTag nbt = stack.getOrCreateTag();
+ CompoundTag nbt = stack.copy().getOrCreateTag();
map.put("count", stack.getCount());
map.put("displayName", stack.getDisplayName().getString());
map.put("maxStackSize", stack.getMaxStackSize()); | ['src/main/java/de/srendi/advancedperipherals/common/util/LuaConverter.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 655,683 | 138,594 | 16,208 | 227 | 108 | 24 | 2 | 1 | 1,087 | 163 | 298 | 35 | 1 | 0 | 1970-01-01T00:28:05 | 74 | Java | {'Java': 681712, 'Lua': 2017} | Apache License 2.0 |
9,871 | sirendii/advancedperipherals/491/490 | sirendii | advancedperipherals | https://github.com/SirEndii/AdvancedPeripherals/issues/490 | https://github.com/SirEndii/AdvancedPeripherals/pull/491 | https://github.com/SirEndii/AdvancedPeripherals/pull/491 | 2 | resolve | ME Bridge inconsistencies | ### Describe
_[I don't know why the modpack author of my pack used the 1.19.3 version for a 1.19.2 pack but it works either way.]_
_{If I'm being stupid for any reason, (didnt see something in changelogs, shouldnt do 2 bugs in one post, etc.), please let me know.]_
While trying to write a program to push a live item counter to a Display Link, I encountered one bug and one potentially intended thing that might be a bug.
- getUsedItemStorage() does not account for items held within AE2 Things DISK Drives, it counts as zero.
- Any get[x]ItemStorage() does not account for items stored externally. (might be intended idk?)
### Steps to reproduce
Bug 1:
1. Have an ME system with a drive containing a DISK Drive
2. Attach an ME Bridge and computer
3. Insert items
4. Call getUsedItemStorage()
5. Returns zero.
Bug? 2:
1. Have an ME system with no drives and a storage bus on an inventory
2. Attach an ME bridge and computer
3. Insert items
4. Call any get[x]ItemStorage()
5. Returns zero.
### Multiplayer?
Yes
### Version
1.19.3-0.7.29r (Latest 1.19.3)
### Minecraft, Forge and maybe other related mods versions
Forge 43.2.14, Minecraft 1.19.2, AE2Things 1.1.1
### Screenshots or Videos
_No response_
### Crashlog/log
_No response_ | 15186b7687615202e3d6e3d848cc3df9d0746772 | 49ebb4d4c5d8edfe6ff486ffae548ccf8132196b | https://github.com/sirendii/advancedperipherals/compare/15186b7687615202e3d6e3d848cc3df9d0746772...49ebb4d4c5d8edfe6ff486ffae548ccf8132196b | diff --git a/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java b/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java
index d8a5a109..f87485fb 100644
--- a/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java
+++ b/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java
@@ -12,6 +12,7 @@ import appeng.api.storage.IStorageProvider;
import appeng.api.storage.MEStorage;
import appeng.blockentity.storage.DriveBlockEntity;
import appeng.items.storage.BasicStorageCell;
+import appeng.parts.storagebus.StorageBusPart;
import dan200.computercraft.shared.util.NBTUtil;
import de.srendi.advancedperipherals.AdvancedPeripherals;
import de.srendi.advancedperipherals.common.addons.APAddons;
@@ -328,7 +329,6 @@ public class AppEngApi {
Iterator<IGridNode> iterator = node.getGrid().getMachineNodes(DriveBlockEntity.class).iterator();
- if (!iterator.hasNext()) return 0;
while (iterator.hasNext()) {
DriveBlockEntity entity = (DriveBlockEntity) iterator.next().getService(IStorageProvider.class);
if (entity == null) continue;
@@ -354,6 +354,19 @@ public class AppEngApi {
}
}
+ iterator = node.getGrid().getMachineNodes(StorageBusPart.class).iterator();
+
+ while (iterator.hasNext()) {
+ StorageBusPart bus = (StorageBusPart) iterator.next().getService(IStorageProvider.class);
+ KeyCounter keyCounter = bus.getInternalHandler().getAvailableStacks();
+
+ for (Object2LongMap.Entry<AEKey> aeKey : keyCounter) {
+ if (aeKey.getKey() instanceof AEItemKey itemKey) {
+ used += aeKey.getLongValue();
+ }
+ }
+ }
+
return used;
}
@@ -362,7 +375,6 @@ public class AppEngApi {
Iterator<IGridNode> iterator = node.getGrid().getMachineNodes(DriveBlockEntity.class).iterator();
- if (!iterator.hasNext()) return 0;
while (iterator.hasNext()) {
DriveBlockEntity entity = (DriveBlockEntity) iterator.next().getService(IStorageProvider.class);
if (entity == null) continue;
@@ -392,6 +404,19 @@ public class AppEngApi {
}
}
+ iterator = node.getGrid().getMachineNodes(StorageBusPart.class).iterator();
+
+ while (iterator.hasNext()) {
+ StorageBusPart bus = (StorageBusPart) iterator.next().getService(IStorageProvider.class);
+ KeyCounter keyCounter = bus.getInternalHandler().getAvailableStacks();
+
+ for (Object2LongMap.Entry<AEKey> aeKey : keyCounter) {
+ if (aeKey.getKey() instanceof AEFluidKey fluidKey) {
+ used += aeKey.getLongValue();
+ }
+ }
+ }
+
return used;
}
| ['src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 674,400 | 142,452 | 16,590 | 229 | 1,233 | 248 | 29 | 1 | 1,296 | 211 | 348 | 47 | 0 | 0 | 1970-01-01T00:28:09 | 74 | Java | {'Java': 681712, 'Lua': 2017} | Apache License 2.0 |
9,874 | sirendii/advancedperipherals/292/291 | sirendii | advancedperipherals | https://github.com/SirEndii/AdvancedPeripherals/issues/291 | https://github.com/SirEndii/AdvancedPeripherals/pull/292 | https://github.com/SirEndii/AdvancedPeripherals/pull/292 | 1 | closes | NullPointerException crash if trying to craft an item with 0 stock with ME bridge | ### Descripe
Own compiled build from current master as of writing, as AE Bridge is broken in latest release.
Game/server crashes if trying to craft an item is not in stock in the ME system.
I think the bug is here:
https://github.com/Seniorendi/AdvancedPeripherals/blob/8debb40a941977609bd80f9e9baf0b1f12a2f7d8/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/CraftJob.java#L83
aeItem can be null, and .getRight is called just after. :)
### Steps to reproduce
Have a recipe in ME system, but no items in stock/in system.
```lua
me = peripheral.wrap("left")
me.craftItem({name="minecraft:oak_planks", count=10})
```
### Multiplayer?
No
### Version
Own compiled build.
### Minecraft, Forge and maybe other related mods versions
Forge 40.1.0, Minecraft 1.18.2, AE2 11.0.0-alpha.6
### Screenshots or Videos
_No response_
### Crashlog/log
https://pastebin.com/298NPaBF | 8debb40a941977609bd80f9e9baf0b1f12a2f7d8 | e20ffee72c30cf7eb6bb7502d5687f98908d2c7a | https://github.com/sirendii/advancedperipherals/compare/8debb40a941977609bd80f9e9baf0b1f12a2f7d8...e20ffee72c30cf7eb6bb7502d5687f98908d2c7a | diff --git a/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java b/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java
index 5c81a1a9..76db35db 100644
--- a/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java
+++ b/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java
@@ -29,17 +29,32 @@ import java.util.Map;
public class AppEngApi {
- public static Pair<Long, AEItemKey> findAEStackFromItemStack(MEStorage monitor, ItemStack item) {
- Pair<Long, AEItemKey> stack = null;
+ public static Pair<Long, AEItemKey> findAEStackFromItemStack(MEStorage monitor, ICraftingService crafting, ItemStack item) {
for (Object2LongMap.Entry<AEKey> temp : monitor.getAvailableStacks()) {
if (temp.getKey() instanceof AEItemKey key) {
if (key.matches(item)) {
- stack = Pair.of(temp.getLongValue(), key);
- break;
+ return Pair.of(temp.getLongValue(), key);
}
}
}
- return stack;
+
+ if (crafting == null) {
+ return null;
+ }
+
+ for (var temp : crafting.getCraftables(param -> true)) {
+ if(temp instanceof AEItemKey key) {
+ if (key.matches(item)) {
+ return Pair.of(0L, key);
+ }
+ }
+ }
+
+ return null;
+ }
+
+ public static Pair<Long, AEItemKey> findAEStackFromItemStack(MEStorage monitor, ItemStack item) {
+ return findAEStackFromItemStack(monitor, null, item);
}
public static List<Object> listStacks(MEStorage monitor, ICraftingService service, int flag) {
diff --git a/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/CraftJob.java b/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/CraftJob.java
index 74a36404..50f2bc18 100644
--- a/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/CraftJob.java
+++ b/src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/CraftJob.java
@@ -79,7 +79,13 @@ public class CraftJob implements ILuaCallback {
ICraftingService crafting = grid.getService(ICraftingService.class);
MEStorage monitor = storage.getInventory();
ItemStack itemstack = item;
- Pair<Long, AEItemKey> aeItem = AppEngApi.findAEStackFromItemStack(monitor, itemstack);
+ Pair<Long, AEItemKey> aeItem = AppEngApi.findAEStackFromItemStack(monitor, crafting, itemstack);
+
+ if (aeItem == null) {
+ AdvancedPeripherals.debug("Could not get AEItem from monitor", org.apache.logging.log4j.Level.FATAL);
+ return;
+ }
+
if (!crafting.isCraftable(aeItem.getRight())) {
fireEvent(false, item.getDescriptionId() + " is not craftable");
return; | ['src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/CraftJob.java', 'src/main/java/de/srendi/advancedperipherals/common/addons/appliedenergistics/AppEngApi.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 550,863 | 116,242 | 13,594 | 191 | 1,359 | 298 | 33 | 2 | 947 | 108 | 262 | 39 | 2 | 1 | 1970-01-01T00:27:30 | 74 | Java | {'Java': 681712, 'Lua': 2017} | Apache License 2.0 |
9,875 | sirendii/advancedperipherals/253/251 | sirendii | advancedperipherals | https://github.com/SirEndii/AdvancedPeripherals/issues/251 | https://github.com/SirEndii/AdvancedPeripherals/pull/253 | https://github.com/SirEndii/AdvancedPeripherals/pull/253 | 1 | fix | [BUG] BlockReader gives no usefull data | ### Descripe
When I use the BlockRader, it only spits out useless data. I don't think this is intentional.
### Steps to reproduce
1. set up BlockReader with chest at the front.
2. put something into the chest as a test.
3. control the block reader with `getBlockData`
### Multiplayer?
Yes
### Version
0.7.8r (Latest)
### Minecraft, Forge and maybe other related mods versions
39.0.10
### Screenshots or Videos
https://imgur.com/a/KrgM9YR
### Crashlog/log
_No response_ | d3b8d1a82e093527f8fe5c5d019cf56c64a33236 | 763d54cebd403fc76f2bedcebd7a8ab6f244801c | https://github.com/sirendii/advancedperipherals/compare/d3b8d1a82e093527f8fe5c5d019cf56c64a33236...763d54cebd403fc76f2bedcebd7a8ab6f244801c | diff --git a/src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/BlockReaderPeripheral.java b/src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/BlockReaderPeripheral.java
index 8887e60f..350d7f18 100644
--- a/src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/BlockReaderPeripheral.java
+++ b/src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/BlockReaderPeripheral.java
@@ -10,6 +10,7 @@ import de.srendi.advancedperipherals.lib.peripherals.BasePeripheral;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.world.level.block.Blocks;
import net.minecraft.world.level.block.EntityBlock;
+import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.state.BlockState;
public class BlockReaderPeripheral extends BasePeripheral<BlockEntityPeripheralOwner<BlockReaderTile>> {
@@ -36,8 +37,14 @@ public class BlockReaderPeripheral extends BasePeripheral<BlockEntityPeripheralO
public final Object getBlockData() {
if (getBlockInFront().is(Blocks.AIR) && !(getBlockInFront().getBlock() instanceof EntityBlock))
return null;
- return NBTUtil.toLua(getLevel().getBlockEntity(getPos().relative(getLevel().getBlockState(getPos()).
- getValue(APTileEntityBlock.FACING))).save(new CompoundTag()));
+ BlockEntity target = getLevel().getBlockEntity(
+ getPos().relative(
+ getLevel().getBlockState(getPos()).getValue(
+ APTileEntityBlock.FACING
+ )
+ )
+ );
+ return NBTUtil.toLua(target.save(target.saveWithoutMetadata()));
}
private BlockState getBlockInFront() { | ['src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/BlockReaderPeripheral.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 503,771 | 106,389 | 12,545 | 183 | 602 | 106 | 11 | 1 | 483 | 74 | 130 | 29 | 1 | 0 | 1970-01-01T00:27:21 | 74 | Java | {'Java': 681712, 'Lua': 2017} | Apache License 2.0 |
9,876 | sirendii/advancedperipherals/209/203 | sirendii | advancedperipherals | https://github.com/SirEndii/AdvancedPeripherals/issues/203 | https://github.com/SirEndii/AdvancedPeripherals/pull/209 | https://github.com/SirEndii/AdvancedPeripherals/pull/209 | 1 | fix | Inventory Manager api, slot argument behaviour is assumedly incorrect. | ### Description
The Inventory Manager api is dumb/broken.
addItemToPlayer slot designation does not choose where to place an item and instead chooses from which slot to take the item in the originating inventory/chest.
This functionality is stupid because in the alternative function, removeItemFromPlayer, the slot designation chooses the slot in the inventory to take the item from. Thus, I assume the intended behaviour is for addItemToPlayer to also designate a slot in the player inventory to place it.
The api wiki also seems to suggest this is the case, so I assume something went horribly wrong and the behaviour was implemented incorrectly. please fix.
### Steps to reproduce
1) place down computer
2) make lua script to place an item in player inventory via inventory manager.
`manager.addItemToPlayer("UP", 1, 4, "minecraft:dirt")`
3) notice that instead of placing the dirt in slot 4, (in the hotbar according to removeItemFromPlayer), it instead takes the dirt from slot 4 from the chest, and places it in the first empty slot in your inventory.
-- > correct behaviour should be to take from any random slot with the item, or anything if no item designated, and to place said item into slot 4 of the inventory, or return an error if said slot is full/occupied. -1 slot designation should place it in the first empty slot in your inventory.
### Multiplayer?
Yes
### Version
0.7.2r (Latest)
### Minecraft, Forge and maybe other related mods versions
Forge 36.2.6, Minecraft 1.16.5
### Screenshots or Videos
_No response_
### Crashlog/log
_No response_ | 13ca9b60b45474d72a50dd9864ab6b122a0570c7 | 50b18c6b762e247cd18176b6bc293eda02575e59 | https://github.com/sirendii/advancedperipherals/compare/13ca9b60b45474d72a50dd9864ab6b122a0570c7...50b18c6b762e247cd18176b6bc293eda02575e59 | diff --git a/src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/InventoryManagerPeripheral.java b/src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/InventoryManagerPeripheral.java
index 8e3c7db9..4f35eea8 100644
--- a/src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/InventoryManagerPeripheral.java
+++ b/src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/InventoryManagerPeripheral.java
@@ -52,11 +52,6 @@ public class InventoryManagerPeripheral extends BasePeripheral<TileEntityPeriphe
Item item1 = ItemUtil.getRegistryEntry(item.get(), ForgeRegistries.ITEMS);
stack = new ItemStack(item1, count);
}
- //With this, we can use the item parameter without need to use the slot parameter. If we don't want to use
- //the slot parameter, we can use -1
- int invSlot = -1;
- if (slot.isPresent() && slot.get() > 0)
- invSlot = slot.get();
Direction direction = validateSide(invDirection);
@@ -65,6 +60,13 @@ public class InventoryManagerPeripheral extends BasePeripheral<TileEntityPeriphe
.getCapability(CapabilityItemHandler.ITEM_HANDLER_CAPABILITY, direction).resolve().orElse(null) : null;
PlayerInventory inventoryTo = getOwnerPlayer().inventory;
+
+
+ int invSlot = slot.orElse(0);
+
+ if (invSlot > inventoryTo.getContainerSize() || invSlot < 0)
+ throw new LuaException("Inventory out of bounds " + invSlot + " (max: " + (inventoryTo.getContainerSize()-1) + ")");
+
//inventoryTo is checked via ensurePlayerIsLinked()
if (inventoryFrom == null)
return 0;
@@ -72,71 +74,33 @@ public class InventoryManagerPeripheral extends BasePeripheral<TileEntityPeriphe
int amount = count;
int transferableAmount = 0;
- if (invSlot == -1)
- for (int i = 0; i < inventoryFrom.getSlots(); i++) {
- if (!stack.isEmpty())
- if (inventoryFrom.getStackInSlot(i).sameItem(stack)) {
- if (inventoryFrom.getStackInSlot(i).getCount() >= amount) {
- if (inventoryTo.add(inventoryFrom.extractItem(i, amount, true))) {
- inventoryFrom.extractItem(i, amount, false);
- transferableAmount += amount;
- }
- break;
- } else {
- int subcount = inventoryFrom.getStackInSlot(i).getCount();
- if (inventoryTo.add(inventoryFrom.extractItem(i, subcount, true))) {
- inventoryFrom.extractItem(i, subcount, false);
- amount = count - subcount;
- transferableAmount += subcount;
- }
- }
- }
+ for (int i = 0; i < inventoryFrom.getSlots() && amount > 0; i++) {
+ if (stack.isEmpty()) {
+ stack = inventoryFrom.getStackInSlot(i).copy();
if (stack.isEmpty())
- if (inventoryFrom.getStackInSlot(i).getCount() >= amount) {
- if (inventoryTo.add(inventoryFrom.extractItem(i, amount, true))) {
- inventoryFrom.extractItem(i, amount, false);
- transferableAmount += amount;
- }
- break;
- } else {
- int subcount = inventoryFrom.getStackInSlot(i).getCount();
- if (inventoryTo.add(inventoryFrom.extractItem(i, subcount, true))) {
- inventoryFrom.extractItem(i, subcount, false);
- amount = count - subcount;
- transferableAmount += subcount;
- }
- }
+ continue;
}
- if (invSlot != -1) {
- if (stack.isEmpty())
- if (inventoryFrom.getStackInSlot(slot.get()).getCount() >= amount) {
- if (inventoryTo.add(inventoryFrom.extractItem(slot.get(), amount, true))) {
- inventoryFrom.extractItem(slot.get(), amount, false);
- transferableAmount += amount;
+
+ if (inventoryFrom.getStackInSlot(i).sameItem(stack)) {
+ ItemStack invSlotItem = inventoryTo.getItem(invSlot);
+ int subcount = Math.min( inventoryFrom.getStackInSlot(i).getCount(), amount);
+ if (!invSlotItem.sameItem(stack) || invSlotItem.getCount() == invSlotItem.getMaxStackSize()) {
+ if (inventoryTo.add(inventoryFrom.extractItem(i, subcount, true))) {
+ transferableAmount += subcount;
+ amount -= subcount;
+ inventoryFrom.extractItem(i, subcount, false);
}
} else {
- int subcount = inventoryFrom.getStackInSlot(slot.get()).getCount();
- if (inventoryTo.add(inventoryFrom.extractItem(slot.get(), subcount, true))) {
- inventoryFrom.extractItem(slot.get(), subcount, false);
+ subcount = Math.min(subcount, stack.getMaxStackSize() - invSlotItem.getCount());
+ if (inventoryTo.add(invSlot, inventoryFrom.extractItem(i, subcount, true))) {
+ inventoryFrom.extractItem(i, subcount, false);
+ amount -= subcount;
transferableAmount += subcount;
}
}
- if (!stack.isEmpty())
- if (inventoryFrom.getStackInSlot(slot.get()).sameItem(stack)) {
- if (inventoryFrom.getStackInSlot(slot.get()).getCount() >= amount) {
- if (inventoryTo.add(inventoryFrom.extractItem(slot.get(), amount, true))) {
- inventoryFrom.extractItem(slot.get(), amount, false);
- transferableAmount += amount;
- }
- } else {
- int subcount = inventoryFrom.getStackInSlot(slot.get()).getCount();
- if (inventoryTo.add(inventoryFrom.extractItem(slot.get(), subcount, true))) {
- inventoryFrom.extractItem(slot.get(), subcount, false);
- transferableAmount += subcount;
- }
- }
- }
+ }
}
+
return transferableAmount;
}
| ['src/main/java/de/srendi/advancedperipherals/common/addons/computercraft/peripheral/InventoryManagerPeripheral.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 599,496 | 127,132 | 15,197 | 209 | 5,160 | 944 | 88 | 1 | 1,614 | 254 | 351 | 37 | 0 | 0 | 1970-01-01T00:27:14 | 74 | Java | {'Java': 681712, 'Lua': 2017} | Apache License 2.0 |
2,048 | telstra/open-kilda/4957/4956 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4956 | https://github.com/telstra/open-kilda/pull/4957 | https://github.com/telstra/open-kilda/pull/4957 | 1 | closes | YFlow CRUD operations use nano seconds unit for max_latency/max_latency_tier2 fields | Create/Update/Patch/Get operations must use milliseconds unit for max_latency/max_latency_tier2 fields.
**Steps to reproduce:**
1. check potential Y flow path and find its latency X.
2. Create/Update/Patch/Get Y flow with `path_computation_strategy=max_latency` and `max_latency=2*x`, `max_latency_tier2=3*x`
**Expected result:**
Created/Updated/Patched/Get flow
**Actual result:**
Path not found | 805fca1ee0ba8c3a0dca11938ec18a0741a65aca | 09025bf5a91724c2ef6a12adfc484471e33a1b89 | https://github.com/telstra/open-kilda/compare/805fca1ee0ba8c3a0dca11938ec18a0741a65aca...09025bf5a91724c2ef6a12adfc484471e33a1b89 | diff --git a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/converter/YFlowMapper.java b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/converter/YFlowMapper.java
index ec1c93cd2..5ad68c413 100644
--- a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/converter/YFlowMapper.java
+++ b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/converter/YFlowMapper.java
@@ -78,6 +78,8 @@ public abstract class YFlowMapper {
@Mapping(target = "yFlowId", source = "YFlowId")
@Mapping(target = "yPoint", source = "YPoint")
@Mapping(target = "protectedPathYPoint", source = "protectedPathYPoint")
+ @Mapping(target = "maxLatency", qualifiedByName = "timeNanosToMillis")
+ @Mapping(target = "maxLatencyTier2", qualifiedByName = "timeNanosToMillis")
public abstract YFlow toYFlow(YFlowDto flow);
public abstract SubFlow toYFlow(SubFlowDto flow);
@@ -164,11 +166,17 @@ public abstract class YFlowMapper {
@Mapping(target = "type", constant = "CREATE")
@Mapping(target = "yFlowId", source = "YFlowId")
+ @Mapping(target = "maxLatency", qualifiedByName = "timeMillisToNanos")
+ @Mapping(target = "maxLatencyTier2", qualifiedByName = "timeMillisToNanos")
public abstract YFlowRequest toYFlowCreateRequest(YFlowCreatePayload source);
@Mapping(target = "type", constant = "UPDATE")
+ @Mapping(target = "maxLatency", qualifiedByName = "timeMillisToNanos")
+ @Mapping(target = "maxLatencyTier2", qualifiedByName = "timeMillisToNanos")
public abstract YFlowRequest toYFlowUpdateRequest(String yFlowId, YFlowUpdatePayload source);
+ @Mapping(target = "maxLatency", qualifiedByName = "timeMillisToNanos")
+ @Mapping(target = "maxLatencyTier2", qualifiedByName = "timeMillisToNanos")
public abstract YFlowPartialUpdateRequest toYFlowPatchRequest(String yFlowId, YFlowPatchPayload source);
@Mapping(target = "status", ignore = true)
diff --git a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/converter/YFlowMapperTest.java b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/converter/YFlowMapperTest.java
index d877ba08a..f7231d63e 100644
--- a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/converter/YFlowMapperTest.java
+++ b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/converter/YFlowMapperTest.java
@@ -18,13 +18,39 @@ package org.openkilda.northbound.converter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
+import org.openkilda.messaging.command.yflow.SubFlowDto;
+import org.openkilda.messaging.command.yflow.SubFlowPartialUpdateDto;
+import org.openkilda.messaging.command.yflow.SubFlowSharedEndpointEncapsulation;
+import org.openkilda.messaging.command.yflow.YFlowDto;
+import org.openkilda.messaging.command.yflow.YFlowPartialUpdateRequest;
+import org.openkilda.messaging.command.yflow.YFlowRequest;
import org.openkilda.messaging.info.flow.SubFlowPingPayload;
import org.openkilda.messaging.info.flow.UniSubFlowPingPayload;
import org.openkilda.messaging.info.flow.YFlowPingResponse;
import org.openkilda.messaging.model.Ping.Errors;
+import org.openkilda.model.FlowEncapsulationType;
+import org.openkilda.model.FlowEndpoint;
+import org.openkilda.model.FlowStatus;
+import org.openkilda.model.PathComputationStrategy;
+import org.openkilda.model.SwitchId;
+import org.openkilda.northbound.dto.v2.flows.DetectConnectedDevicesV2;
+import org.openkilda.northbound.dto.v2.flows.FlowEndpointV2;
+import org.openkilda.northbound.dto.v2.flows.FlowPatchEndpoint;
+import org.openkilda.northbound.dto.v2.yflows.SubFlow;
+import org.openkilda.northbound.dto.v2.yflows.SubFlowPatchPayload;
+import org.openkilda.northbound.dto.v2.yflows.SubFlowUpdatePayload;
+import org.openkilda.northbound.dto.v2.yflows.YFlow;
+import org.openkilda.northbound.dto.v2.yflows.YFlowCreatePayload;
+import org.openkilda.northbound.dto.v2.yflows.YFlowPatchPayload;
+import org.openkilda.northbound.dto.v2.yflows.YFlowPatchSharedEndpoint;
+import org.openkilda.northbound.dto.v2.yflows.YFlowPatchSharedEndpointEncapsulation;
import org.openkilda.northbound.dto.v2.yflows.YFlowPingResult;
+import org.openkilda.northbound.dto.v2.yflows.YFlowSharedEndpoint;
+import org.openkilda.northbound.dto.v2.yflows.YFlowSharedEndpointEncapsulation;
+import org.openkilda.northbound.dto.v2.yflows.YFlowUpdatePayload;
import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
@@ -32,23 +58,202 @@ import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.test.context.junit4.SpringRunner;
+import java.time.Instant;
+
@RunWith(SpringRunner.class)
public class YFlowMapperTest {
public static final String ERROR_MESSAGE = "Error";
public static final String Y_FLOW_ID = "y_flow_id";
- public static final String SUB_FLOW_1 = "flow_1";
- public static final String SUB_FLOW_2 = "flow_2";
+ public static final String SUB_FLOW_1_NAME = "flow_1";
+ public static final String SUB_FLOW_2_NAME = "flow_2";
+ public static final String FLOW_3 = "flow_3";
+ public static final SwitchId SWITCH_ID_1 = new SwitchId(1);
+ public static final SwitchId SWITCH_ID_2 = new SwitchId(2);
+ public static final SwitchId SWITCH_ID_3 = new SwitchId(3);
+ public static final int PORT_1 = 1;
+ public static final int PORT_2 = 2;
+ public static final int PORT_3 = 3;
+ public static final int VLAN_1 = 4;
+ public static final int VLAN_2 = 5;
+ public static final int VLAN_3 = 6;
+ public static final int VLAN_4 = 7;
+ public static final int VLAN_5 = 8;
+ public static final int BANDWIDTH = 100;
+ public static final long MILLION = 1_000_000L;
+ public static final long MAX_LATENCY = 10L;
+ public static final long MAX_LATENCY_TIER_2 = 20L;
+ public static final int PRIORITY = 15;
+ public static final String DESC_1 = "desc1";
+ public static final String DESC_2 = "desc2";
+ public static final String DESC_3 = "desc3";
+ public static final SubFlowUpdatePayload SUB_FLOW_1 = new SubFlowUpdatePayload(SUB_FLOW_1_NAME,
+ new FlowEndpointV2(SWITCH_ID_1, PORT_1, VLAN_1),
+ new YFlowSharedEndpointEncapsulation(VLAN_4, VLAN_5), DESC_2);
+ public static final SubFlowUpdatePayload SUB_FLOW_2 = new SubFlowUpdatePayload(SUB_FLOW_2_NAME,
+ new FlowEndpointV2(SWITCH_ID_2, PORT_2, VLAN_2),
+ new YFlowSharedEndpointEncapsulation(VLAN_3, VLAN_4), DESC_3);
@Autowired
private YFlowMapper mapper;
+ @Test
+ public void createRequestTest() {
+ YFlowCreatePayload request = new YFlowCreatePayload(Y_FLOW_ID, new YFlowSharedEndpoint(SWITCH_ID_3, PORT_3),
+ BANDWIDTH, PathComputationStrategy.COST.name(), FlowEncapsulationType.VXLAN.name(), MAX_LATENCY,
+ MAX_LATENCY_TIER_2, true, false, true, PRIORITY, false, DESC_1, true, FLOW_3,
+ Lists.newArrayList(SUB_FLOW_1, SUB_FLOW_2));
+
+ YFlowRequest result = mapper.toYFlowCreateRequest(request);
+ assertEquals(request.getYFlowId(), result.getYFlowId());
+ assertEquals(request.getMaximumBandwidth(), result.getMaximumBandwidth());
+ assertEquals(request.getPathComputationStrategy(), result.getPathComputationStrategy().toString());
+ assertEquals(request.getEncapsulationType(), result.getEncapsulationType().toString());
+ assertEquals((Long) (request.getMaxLatency() * MILLION), result.getMaxLatency());
+ assertEquals((Long) (request.getMaxLatencyTier2() * MILLION), result.getMaxLatencyTier2());
+ assertEquals(request.isIgnoreBandwidth(), result.isIgnoreBandwidth());
+ assertEquals(request.isPeriodicPings(), result.isPeriodicPings());
+ assertEquals(request.isPinned(), result.isPinned());
+ assertEquals(request.getPriority(), result.getPriority());
+ assertEquals(request.isStrictBandwidth(), result.isStrictBandwidth());
+ assertEquals(request.getDescription(), result.getDescription());
+ assertEquals(request.isAllocateProtectedPath(), result.isAllocateProtectedPath());
+ assertEquals(request.getDiverseFlowId(), result.getDiverseFlowId());
+
+ assertEquals(2, result.getSubFlows().size());
+ assertSubFlow(request.getSubFlows().get(0), result.getSubFlows().get(0));
+ assertSubFlow(request.getSubFlows().get(1), result.getSubFlows().get(1));
+ }
+
+ @Test
+ public void updateRequestTest() {
+ YFlowUpdatePayload request = new YFlowUpdatePayload(new YFlowSharedEndpoint(SWITCH_ID_3, PORT_3),
+ BANDWIDTH, PathComputationStrategy.COST.name(), FlowEncapsulationType.VXLAN.name(), MAX_LATENCY,
+ MAX_LATENCY_TIER_2, true, false, true, PRIORITY, false, DESC_1, true, FLOW_3,
+ Lists.newArrayList(SUB_FLOW_1, SUB_FLOW_2));
+
+ YFlowRequest result = mapper.toYFlowUpdateRequest(Y_FLOW_ID, request);
+ assertEquals(Y_FLOW_ID, result.getYFlowId());
+ assertEquals(request.getMaximumBandwidth(), result.getMaximumBandwidth());
+ assertEquals(request.getPathComputationStrategy(), result.getPathComputationStrategy().toString());
+ assertEquals(request.getEncapsulationType(), result.getEncapsulationType().toString());
+ assertEquals((Long) (request.getMaxLatency() * MILLION), result.getMaxLatency());
+ assertEquals((Long) (request.getMaxLatencyTier2() * MILLION), result.getMaxLatencyTier2());
+ assertEquals(request.isIgnoreBandwidth(), result.isIgnoreBandwidth());
+ assertEquals(request.isPeriodicPings(), result.isPeriodicPings());
+ assertEquals(request.isPinned(), result.isPinned());
+ assertEquals(request.getPriority(), result.getPriority());
+ assertEquals(request.isStrictBandwidth(), result.isStrictBandwidth());
+ assertEquals(request.getDescription(), result.getDescription());
+ assertEquals(request.isAllocateProtectedPath(), result.isAllocateProtectedPath());
+ assertEquals(request.getDiverseFlowId(), result.getDiverseFlowId());
+
+ assertEquals(2, result.getSubFlows().size());
+ assertSubFlow(request.getSubFlows().get(0), result.getSubFlows().get(0));
+ assertSubFlow(request.getSubFlows().get(1), result.getSubFlows().get(1));
+ }
+
+ @Test
+ public void patchRequestTest() {
+ YFlowPatchPayload request = new YFlowPatchPayload(new YFlowPatchSharedEndpoint(SWITCH_ID_3, PORT_3),
+ (long) BANDWIDTH, PathComputationStrategy.COST.name(), FlowEncapsulationType.VXLAN.name(), MAX_LATENCY,
+ MAX_LATENCY_TIER_2, true, false, true, PRIORITY, false, DESC_1, true, FLOW_3,
+ Lists.newArrayList(
+ new SubFlowPatchPayload(SUB_FLOW_1_NAME,
+ new FlowPatchEndpoint(SWITCH_ID_1, PORT_1, VLAN_1, VLAN_2,
+ new DetectConnectedDevicesV2(true, true)),
+ new YFlowPatchSharedEndpointEncapsulation(VLAN_3, VLAN_4), DESC_2),
+ new SubFlowPatchPayload(SUB_FLOW_2_NAME,
+ new FlowPatchEndpoint(SWITCH_ID_2, PORT_2, VLAN_4, VLAN_5,
+ new DetectConnectedDevicesV2(true, true)),
+ new YFlowPatchSharedEndpointEncapsulation(VLAN_2, VLAN_3), DESC_3)));
+
+ YFlowPartialUpdateRequest result = mapper.toYFlowPatchRequest(Y_FLOW_ID, request);
+ assertEquals(Y_FLOW_ID, result.getYFlowId());
+ assertEquals(request.getMaximumBandwidth(), result.getMaximumBandwidth());
+ assertEquals(request.getPathComputationStrategy(), result.getPathComputationStrategy().toString());
+ assertEquals(request.getEncapsulationType(), result.getEncapsulationType().toString());
+ assertEquals((Long) (request.getMaxLatency() * MILLION), result.getMaxLatency());
+ assertEquals((Long) (request.getMaxLatencyTier2() * MILLION), result.getMaxLatencyTier2());
+ assertEquals(request.getIgnoreBandwidth(), result.getIgnoreBandwidth());
+ assertEquals(request.getPeriodicPings(), result.getPeriodicPings());
+ assertEquals(request.getPinned(), result.getPinned());
+ assertEquals(request.getPriority(), result.getPriority());
+ assertEquals(request.getStrictBandwidth(), result.getStrictBandwidth());
+ assertEquals(request.getDescription(), result.getDescription());
+ assertEquals(request.getAllocateProtectedPath(), result.getAllocateProtectedPath());
+ assertEquals(request.getDiverseFlowId(), result.getDiverseFlowId());
+
+ assertEquals(2, result.getSubFlows().size());
+ assertSubFlow(request.getSubFlows().get(0), result.getSubFlows().get(0));
+ assertSubFlow(request.getSubFlows().get(1), result.getSubFlows().get(1));
+ }
+
+ @Test
+ public void getResponseTest() {
+ YFlowDto response = new YFlowDto();
+ response.setYFlowId(Y_FLOW_ID);
+ response.setSharedEndpoint(new FlowEndpoint(SWITCH_ID_3, PORT_3));
+ response.setMaximumBandwidth(BANDWIDTH);
+ response.setPathComputationStrategy(PathComputationStrategy.COST);
+ response.setEncapsulationType(FlowEncapsulationType.VXLAN);
+ response.setMaxLatency(MAX_LATENCY * MILLION);
+ response.setMaxLatencyTier2(MAX_LATENCY_TIER_2 * MILLION);
+ response.setIgnoreBandwidth(true);
+ response.setPeriodicPings(false);
+ response.setPinned(true);
+ response.setPriority(PRIORITY);
+ response.setStrictBandwidth(false);
+ response.setDescription(DESC_1);
+ response.setAllocateProtectedPath(true);
+ response.setProtectedPathYPoint(SWITCH_ID_3);
+ response.setDiverseWithFlows(Sets.newHashSet(FLOW_3));
+ response.setSubFlows(Lists.newArrayList(
+ SubFlowDto.builder().flowId(SUB_FLOW_1_NAME)
+ .endpoint(new FlowEndpoint(SWITCH_ID_1, PORT_1))
+ .sharedEndpoint(new SubFlowSharedEndpointEncapsulation(VLAN_2, VLAN_3))
+ .status(FlowStatus.UP)
+ .description(DESC_2)
+ .timeCreate(Instant.MIN)
+ .timeUpdate(Instant.MAX)
+ .build(),
+ SubFlowDto.builder().flowId(SUB_FLOW_2_NAME)
+ .endpoint(new FlowEndpoint(SWITCH_ID_2, PORT_2))
+ .sharedEndpoint(new SubFlowSharedEndpointEncapsulation(VLAN_3, VLAN_4))
+ .status(FlowStatus.UP)
+ .description(DESC_3)
+ .timeCreate(Instant.MAX)
+ .timeUpdate(Instant.MIN)
+ .build()));
+
+ YFlow result = mapper.toYFlow(response);
+ assertEquals(Y_FLOW_ID, result.getYFlowId());
+ assertEquals(response.getMaximumBandwidth(), result.getMaximumBandwidth());
+ assertEquals(response.getPathComputationStrategy().toString().toLowerCase(),
+ result.getPathComputationStrategy());
+ assertEquals(response.getEncapsulationType().toString().toLowerCase(), result.getEncapsulationType());
+ assertEquals(MAX_LATENCY, result.getMaxLatency().longValue());
+ assertEquals(MAX_LATENCY_TIER_2, result.getMaxLatencyTier2().longValue());
+ assertEquals(response.isIgnoreBandwidth(), result.isIgnoreBandwidth());
+ assertEquals(response.isPeriodicPings(), result.isPeriodicPings());
+ assertEquals(response.isPinned(), result.isPinned());
+ assertEquals(response.getPriority(), result.getPriority());
+ assertEquals(response.isStrictBandwidth(), result.isStrictBandwidth());
+ assertEquals(response.getDescription(), result.getDescription());
+ assertEquals(response.isAllocateProtectedPath(), result.isAllocateProtectedPath());
+ assertEquals(response.getDiverseWithFlows(), result.getDiverseWithFlows());
+
+ assertEquals(2, result.getSubFlows().size());
+ assertSubFlow(response.getSubFlows().get(0), result.getSubFlows().get(0));
+ assertSubFlow(response.getSubFlows().get(1), result.getSubFlows().get(1));
+ }
+
@Test
public void pingResultTest() {
YFlowPingResponse response = new YFlowPingResponse(Y_FLOW_ID, false, ERROR_MESSAGE, Lists.newArrayList(
- new SubFlowPingPayload(SUB_FLOW_1,
+ new SubFlowPingPayload(SUB_FLOW_1_NAME,
new UniSubFlowPingPayload(true, null, 1),
new UniSubFlowPingPayload(false, Errors.TIMEOUT, 2)),
- new SubFlowPingPayload(SUB_FLOW_2,
+ new SubFlowPingPayload(SUB_FLOW_2_NAME,
new UniSubFlowPingPayload(false, Errors.DEST_NOT_AVAILABLE, 3),
new UniSubFlowPingPayload(true, null, 4))));
@@ -87,6 +292,39 @@ public class YFlowMapperTest {
}
}
+ private void assertSubFlow(SubFlowUpdatePayload expected, SubFlowDto actual) {
+ assertEquals(expected.getFlowId(), actual.getFlowId());
+ assertEquals(expected.getDescription(), actual.getDescription());
+ assertEquals(expected.getEndpoint().getSwitchId(), actual.getEndpoint().getSwitchId());
+ assertEquals(expected.getEndpoint().getPortNumber(), actual.getEndpoint().getPortNumber());
+ assertEquals(expected.getEndpoint().getVlanId(), actual.getEndpoint().getOuterVlanId());
+ assertEquals(expected.getEndpoint().getInnerVlanId(), actual.getEndpoint().getInnerVlanId());
+ assertEquals(expected.getSharedEndpoint().getVlanId(), actual.getSharedEndpoint().getVlanId());
+ assertEquals(expected.getSharedEndpoint().getInnerVlanId(), actual.getSharedEndpoint().getInnerVlanId());
+ }
+
+ private void assertSubFlow(SubFlowPatchPayload expected, SubFlowPartialUpdateDto actual) {
+ assertEquals(expected.getFlowId(), actual.getFlowId());
+ assertEquals(expected.getDescription(), actual.getDescription());
+ assertEquals(expected.getEndpoint().getSwitchId(), actual.getEndpoint().getSwitchId());
+ assertEquals(expected.getEndpoint().getPortNumber(), actual.getEndpoint().getPortNumber());
+ assertEquals(expected.getEndpoint().getVlanId(), actual.getEndpoint().getVlanId());
+ assertEquals(expected.getEndpoint().getInnerVlanId(), actual.getEndpoint().getInnerVlanId());
+ assertEquals(expected.getSharedEndpoint().getVlanId(), actual.getSharedEndpoint().getVlanId());
+ assertEquals(expected.getSharedEndpoint().getInnerVlanId(), actual.getSharedEndpoint().getInnerVlanId());
+ }
+
+ private void assertSubFlow(SubFlowDto expected, SubFlow actual) {
+ assertEquals(expected.getFlowId(), actual.getFlowId());
+ assertEquals(expected.getDescription(), actual.getDescription());
+ assertEquals(expected.getEndpoint().getSwitchId(), actual.getEndpoint().getSwitchId());
+ assertEquals(expected.getEndpoint().getPortNumber(), actual.getEndpoint().getPortNumber());
+ assertEquals(expected.getEndpoint().getOuterVlanId(), actual.getEndpoint().getVlanId());
+ assertEquals(expected.getEndpoint().getInnerVlanId(), actual.getEndpoint().getInnerVlanId());
+ assertEquals(expected.getSharedEndpoint().getVlanId(), actual.getSharedEndpoint().getVlanId());
+ assertEquals(expected.getSharedEndpoint().getInnerVlanId(), actual.getSharedEndpoint().getInnerVlanId());
+ }
+
@TestConfiguration
@ComponentScan({"org.openkilda.northbound.converter"})
static class Config { | ['src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/converter/YFlowMapper.java', 'src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/converter/YFlowMapperTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 11,204,386 | 2,338,758 | 286,979 | 3,056 | 627 | 168 | 8 | 1 | 411 | 41 | 102 | 12 | 0 | 0 | 1970-01-01T00:27:45 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,049 | telstra/open-kilda/4937/4938 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4938 | https://github.com/telstra/open-kilda/pull/4937 | https://github.com/telstra/open-kilda/pull/4937 | 1 | closes | Ingress mirror install excess pre ingress rule on switch | **Steps to reproduce:**
1. Create a flow
2. Create forward mirror on this flow
3. Get switch rules
**Expected result:**
one QinQ rule in table 1 (it has cookie `0x0080_0000_0***_****`)
**Actual result:**
two same QinQ rules in table 1. The only difference is rule priority | 720d3dcef0b5b07e46a26c4196e054a4f0142afe | 140ae5726272314a229e5fb7f1a63a3678c0c12d | https://github.com/telstra/open-kilda/compare/720d3dcef0b5b07e46a26c4196e054a4f0142afe...140ae5726272314a229e5fb7f1a63a3678c0c12d | diff --git a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentInstallCommand.java b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentInstallCommand.java
index a6f721c8b..3fed2ebda 100644
--- a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentInstallCommand.java
+++ b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentInstallCommand.java
@@ -15,27 +15,34 @@
package org.openkilda.floodlight.command.flow.ingress;
+import org.openkilda.floodlight.command.SpeakerCommandProcessor;
+import org.openkilda.floodlight.command.flow.FlowSegmentReport;
import org.openkilda.floodlight.command.flow.ingress.of.IngressFlowSegmentInstallMultiTableMirrorFlowModFactory;
import org.openkilda.floodlight.command.flow.ingress.of.IngressFlowSegmentInstallSingleTableMirrorFlowModFactory;
import org.openkilda.floodlight.model.FlowSegmentMetadata;
import org.openkilda.floodlight.model.RulesContext;
import org.openkilda.messaging.MessageContext;
+import org.openkilda.model.FlowEncapsulationType;
import org.openkilda.model.FlowEndpoint;
import org.openkilda.model.FlowTransitEncapsulation;
import org.openkilda.model.MeterConfig;
import org.openkilda.model.MirrorConfig;
+import org.openkilda.model.SwitchFeature;
import org.openkilda.model.SwitchId;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.Sets;
import lombok.Getter;
+import java.util.List;
import java.util.Set;
import java.util.UUID;
+import java.util.concurrent.CompletableFuture;
@Getter
@SuppressWarnings("squid:MaximumInheritanceDepth")
-public class IngressMirrorFlowSegmentInstallCommand extends IngressFlowSegmentInstallCommand {
+public class IngressMirrorFlowSegmentInstallCommand extends IngressFlowSegmentCommand {
@JsonCreator
public IngressMirrorFlowSegmentInstallCommand(
@JsonProperty("message_context") MessageContext context,
@@ -63,4 +70,28 @@ public class IngressMirrorFlowSegmentInstallCommand extends IngressFlowSegmentIn
new IngressFlowSegmentInstallSingleTableMirrorFlowModFactory(this, getSw(), getSwitchFeatures()));
}
}
+
+ @Override
+ protected List<Set<SwitchFeature>> getRequiredFeatures() {
+ List<Set<SwitchFeature>> required = super.getRequiredFeatures();
+ if (encapsulation.getType() == FlowEncapsulationType.VXLAN) {
+ required.add(Sets.newHashSet(
+ SwitchFeature.NOVIFLOW_PUSH_POP_VXLAN, SwitchFeature.KILDA_OVS_PUSH_POP_MATCH_VXLAN));
+ }
+ if (metadata.isMultiTable()) {
+ required.add(Sets.newHashSet(SwitchFeature.MULTI_TABLE));
+ }
+
+ return required;
+ }
+
+ @Override
+ protected CompletableFuture<FlowSegmentReport> makeExecutePlan(SpeakerCommandProcessor commandProcessor) {
+ return makeInstallPlan(commandProcessor);
+ }
+
+ @Override
+ protected SegmentAction getSegmentAction() {
+ return SegmentAction.INSTALL;
+ }
}
diff --git a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentRemoveCommand.java b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentRemoveCommand.java
index 91c507817..73a17240a 100644
--- a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentRemoveCommand.java
+++ b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentRemoveCommand.java
@@ -15,6 +15,8 @@
package org.openkilda.floodlight.command.flow.ingress;
+import org.openkilda.floodlight.command.SpeakerCommandProcessor;
+import org.openkilda.floodlight.command.flow.FlowSegmentReport;
import org.openkilda.floodlight.command.flow.ingress.of.IngressFlowSegmentRemoveMultiTableMirrorFlowModFactory;
import org.openkilda.floodlight.command.flow.ingress.of.IngressFlowSegmentRemoveSingleTableMirrorFlowModFactory;
import org.openkilda.floodlight.model.FlowSegmentMetadata;
@@ -30,9 +32,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Set;
import java.util.UUID;
+import java.util.concurrent.CompletableFuture;
@SuppressWarnings("squid:MaximumInheritanceDepth")
-public class IngressMirrorFlowSegmentRemoveCommand extends IngressFlowSegmentRemoveCommand {
+public class IngressMirrorFlowSegmentRemoveCommand extends IngressFlowSegmentCommand {
public IngressMirrorFlowSegmentRemoveCommand(
@JsonProperty("message_context") MessageContext context,
@JsonProperty("command_id") UUID commandId,
@@ -59,4 +62,14 @@ public class IngressMirrorFlowSegmentRemoveCommand extends IngressFlowSegmentRem
new IngressFlowSegmentRemoveSingleTableMirrorFlowModFactory(this, getSw(), getSwitchFeatures()));
}
}
+
+ @Override
+ protected CompletableFuture<FlowSegmentReport> makeExecutePlan(SpeakerCommandProcessor commandProcessor) {
+ return makeRemovePlan(commandProcessor);
+ }
+
+ @Override
+ protected SegmentAction getSegmentAction() {
+ return SegmentAction.REMOVE;
+ }
} | ['src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentInstallCommand.java', 'src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/flow/ingress/IngressMirrorFlowSegmentRemoveCommand.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 11,199,502 | 2,337,781 | 286,879 | 3,056 | 2,024 | 400 | 48 | 2 | 284 | 45 | 81 | 10 | 0 | 0 | 1970-01-01T00:27:41 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,065 | telstra/open-kilda/4110/4106 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4106 | https://github.com/telstra/open-kilda/pull/4110 | https://github.com/telstra/open-kilda/pull/4110 | 1 | closes | Flow partial update clears 'inner_vlan_id' if not set | 1. Create flow with non-zero values in 'inner_vlan_id' fields
<details>
<summary>curl</summary>
```
curl --location --request POST 'http://localhost:8080/api/v2/flows' \\
--header 'Accept: application/json;charset=UTF-8' \\
--header 'Content-Type: application/json' \\
--header 'Authorization: Basic a2lsZGE6a2lsZGE=' \\
--header 'correlation_id: rtretiak_test1614949868' \\
--data-raw '{
"flow_id": "rtrtk_test",
"source": {
"switch_id": "1",
"port_number": 13,
"vlan_id": 111,
"inner_vlan_id": 123
},
"destination": {
"switch_id": "2",
"port_number": 14,
"vlan_id": 111,
"inner_vlan_id": 123
},
"maximum_bandwidth": 100,
"ignore_bandwidth": false,
"allocate_protected_path": false,
"encapsulation_type": "transit_vlan",
"description": "autotest flow with weight loss, unexplained"
}'
```
</details>
2. Update flow using partial update API (`PATCH {{northbound.endpoint}}/api/v2/flows/:flowId`) without mentioning 'inner_vlan_id' fields. For example, update only source port number
<details>
<summary>curl</summary>
```
curl --location --request PATCH 'http://localhost:8080/api/v2/flows/rtrtk_test' \\
--header 'Accept: application/json;charset=UTF-8' \\
--header 'Content-Type: application/json' \\
--header 'correlation_id: rtrtk1614950136' \\
--header 'Authorization: Basic a2lsZGE6a2lsZGE=' \\
--data-raw '{
"source":{
"port_number": 14
}
}'
```
</details>
**Expected:** Only source port number is updated
**Actual:** 'inner_vlan_id' both on src and dst is set to 0. Port number is updated as expected
| 8158bf1cd10cce6e00373811a006db57cabf7966 | 25ba661f2af4173fc2b4c692130db55c59fbbe60 | https://github.com/telstra/open-kilda/compare/8158bf1cd10cce6e00373811a006db57cabf7966...25ba661f2af4173fc2b4c692130db55c59fbbe60 | diff --git a/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/share/mappers/RequestedFlowMapper.java b/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/share/mappers/RequestedFlowMapper.java
index a14c2169a..0b4cfa089 100644
--- a/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/share/mappers/RequestedFlowMapper.java
+++ b/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/share/mappers/RequestedFlowMapper.java
@@ -34,9 +34,9 @@ public abstract class RequestedFlowMapper {
*/
@Mapping(target = "flowId", source = "flowId")
@Mapping(target = "source", expression = "java(new FlowEndpoint(flow.getSrcSwitchId(), "
- + "flow.getSrcPort(), flow.getSrcVlan()))")
+ + "flow.getSrcPort(), flow.getSrcVlan(), flow.getSrcInnerVlan()))")
@Mapping(target = "destination", expression = "java(new FlowEndpoint(flow.getDestSwitchId(), "
- + "flow.getDestPort(), flow.getDestVlan()))")
+ + "flow.getDestPort(), flow.getDestVlan(), flow.getDestInnerVlan()))")
@Mapping(target = "encapsulationType", source = "encapsulationType")
@Mapping(target = "pathComputationStrategy",
expression = "java(java.util.Optional.ofNullable(flow.getPathComputationStrategy())"
diff --git a/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/share/mappers/RequestedFlowMapperTest.java b/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/share/mappers/RequestedFlowMapperTest.java
index 185d74aa5..077ccd99d 100644
--- a/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/share/mappers/RequestedFlowMapperTest.java
+++ b/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/share/mappers/RequestedFlowMapperTest.java
@@ -37,7 +37,9 @@ public class RequestedFlowMapperTest {
public static final Integer DST_PORT = 2;
public static final int SRC_VLAN = 3;
public static final int DST_VLAN = 4;
- public static final Integer PRIORITY = 5;
+ public static final int SRC_INNER_VLAN = 5;
+ public static final int DST_INNER_VLAN = 6;
+ public static final Integer PRIORITY = 7;
public static final String DESCRIPTION = "description";
public static final int BANDWIDTH = 1000;
public static final Long MAX_LATENCY = 200L;
@@ -48,9 +50,11 @@ public class RequestedFlowMapperTest {
.srcSwitch(Switch.builder().switchId(SRC_SWITCH_ID).build())
.srcPort(SRC_PORT)
.srcVlan(SRC_VLAN)
+ .srcInnerVlan(SRC_INNER_VLAN)
.destSwitch(Switch.builder().switchId(DST_SWITCH_ID).build())
.destPort(DST_PORT)
.destVlan(DST_VLAN)
+ .destInnerVlan(DST_INNER_VLAN)
.priority(PRIORITY)
.description(DESCRIPTION)
.bandwidth(BANDWIDTH)
@@ -74,9 +78,11 @@ public class RequestedFlowMapperTest {
assertEquals(SRC_SWITCH_ID, flowRequest.getSource().getSwitchId());
assertEquals(SRC_PORT, flowRequest.getSource().getPortNumber());
assertEquals(SRC_VLAN, flowRequest.getSource().getOuterVlanId());
+ assertEquals(SRC_INNER_VLAN, flowRequest.getSource().getInnerVlanId());
assertEquals(DST_SWITCH_ID, flowRequest.getDestination().getSwitchId());
assertEquals(DST_PORT, flowRequest.getDestination().getPortNumber());
assertEquals(DST_VLAN, flowRequest.getDestination().getOuterVlanId());
+ assertEquals(DST_INNER_VLAN, flowRequest.getDestination().getInnerVlanId());
assertEquals(PRIORITY, flowRequest.getPriority());
assertEquals(DESCRIPTION, flowRequest.getDescription());
assertEquals(BANDWIDTH, flowRequest.getBandwidth()); | ['src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/share/mappers/RequestedFlowMapper.java', 'src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/share/mappers/RequestedFlowMapperTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 8,369,449 | 1,747,735 | 219,322 | 2,361 | 280 | 74 | 4 | 1 | 1,680 | 158 | 471 | 54 | 2 | 2 | 1970-01-01T00:26:55 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,064 | telstra/open-kilda/4122/4142 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4142 | https://github.com/telstra/open-kilda/pull/4122 | https://github.com/telstra/open-kilda/pull/4122 | 1 | closes | WARNs form flowmonitoring topology `The command context is missing in input tuple` | There are a lot of warns form flow monitoring topology like `The command context is missing in input tuple`
Need to add command context into topology tuples | f75c9d68ede317f9d06529ac0cba49640417f97c | a177c73099ea1c2b605ab9cc3238fae288ee663f | https://github.com/telstra/open-kilda/compare/f75c9d68ede317f9d06529ac0cba49640417f97c...a177c73099ea1c2b605ab9cc3238fae288ee663f | diff --git a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/FlowCacheBolt.java b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/FlowCacheBolt.java
index 02ec248a0..74f099780 100644
--- a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/FlowCacheBolt.java
+++ b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/FlowCacheBolt.java
@@ -90,22 +90,23 @@ public class FlowCacheBolt extends AbstractBolt implements FlowCacheBoltCarrier
@Override
public void emitCalculateFlowLatencyRequest(String flowId, FlowDirection direction,
List<Link> flowPath, Long maxLatency, Long maxLatencyTier2) {
- emit(getCurrentTuple(), new Values(flowId, direction, flowPath, maxLatency, maxLatencyTier2));
+ emit(getCurrentTuple(), new Values(flowId, direction, flowPath, maxLatency, maxLatencyTier2,
+ getCommandContext()));
}
@Override
public void emitCheckFlowLatencyRequest(String flowId, FlowDirection direction, long latency,
Long maxLatency, Long maxLatencyTier2) {
emit(ACTION_STREAM_ID.name(), getCurrentTuple(), new Values(flowId, direction, latency,
- maxLatency, maxLatencyTier2));
+ maxLatency, maxLatencyTier2, getCommandContext()));
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields(FLOW_ID_FIELD, FLOW_DIRECTION_FIELD,
- FLOW_PATH_FIELD, MAX_LATENCY_FIELD, MAX_LATENCY_TIER_2_FIELD));
+ FLOW_PATH_FIELD, MAX_LATENCY_FIELD, MAX_LATENCY_TIER_2_FIELD, FIELD_ID_CONTEXT));
declarer.declareStream(ACTION_STREAM_ID.name(), new Fields(FLOW_ID_FIELD, FLOW_DIRECTION_FIELD,
- LATENCY_FIELD, MAX_LATENCY_FIELD, MAX_LATENCY_TIER_2_FIELD));
+ LATENCY_FIELD, MAX_LATENCY_FIELD, MAX_LATENCY_TIER_2_FIELD, FIELD_ID_CONTEXT));
declarer.declareStream(ZkStreams.ZK.toString(), new Fields(ZooKeeperBolt.FIELD_ID_STATE,
ZooKeeperBolt.FIELD_ID_CONTEXT));
}
diff --git a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/IslCacheBolt.java b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/IslCacheBolt.java
index 8141107e3..0faf7617e 100644
--- a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/IslCacheBolt.java
+++ b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/IslCacheBolt.java
@@ -117,7 +117,8 @@ public class IslCacheBolt extends AbstractBolt {
long latency = islCacheService.calculateLatencyForPath(flowPath);
- emit(ACTION_STREAM_ID.name(), input, new Values(flowId, direction, latency, maxLatency, maxLatencyTier2));
+ emit(ACTION_STREAM_ID.name(), input, new Values(flowId, direction, latency, maxLatency, maxLatencyTier2,
+ getCommandContext()));
} else {
unhandledInput(input);
}
@@ -126,7 +127,7 @@ public class IslCacheBolt extends AbstractBolt {
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declareStream(ACTION_STREAM_ID.name(), new Fields(FLOW_ID_FIELD, FLOW_DIRECTION_FIELD,
- LATENCY_FIELD, MAX_LATENCY_FIELD, MAX_LATENCY_TIER_2_FIELD));
+ LATENCY_FIELD, MAX_LATENCY_FIELD, MAX_LATENCY_TIER_2_FIELD, FIELD_ID_CONTEXT));
declarer.declareStream(ZkStreams.ZK.toString(), new Fields(ZooKeeperBolt.FIELD_ID_STATE,
ZooKeeperBolt.FIELD_ID_CONTEXT));
} | ['src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/FlowCacheBolt.java', 'src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/bolts/IslCacheBolt.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 8,399,709 | 1,753,986 | 219,898 | 2,365 | 1,175 | 274 | 14 | 2 | 160 | 27 | 31 | 3 | 0 | 0 | 1970-01-01T00:26:55 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,051 | telstra/open-kilda/4871/4788 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4788 | https://github.com/telstra/open-kilda/pull/4871 | https://github.com/telstra/open-kilda/pull/4871 | 1 | resolves | Swap Endpoints operation response timeout if flow update is disabled in toggles | **Steps to reproduce:**
1. Disable `update_flow` in feature toggles
2. Send swap endpoint request `/v2/flows/swap-endpoint` (you can send it even for non existing flows)
**Expected result:**
Response with error
**Actual result:**
Response with timeout
| 89c7a0b92047a86aac271b6871b8772c1a063572 | 33e5fda32e4e699d12e42044fcab29c1e29f04c3 | https://github.com/telstra/open-kilda/compare/89c7a0b92047a86aac271b6871b8772c1a063572...33e5fda32e4e699d12e42044fcab29c1e29f04c3 | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowProcessingWithHistorySupportFsm.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowProcessingWithHistorySupportFsm.java
index b475fd724..28dcf27c9 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowProcessingWithHistorySupportFsm.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowProcessingWithHistorySupportFsm.java
@@ -51,18 +51,26 @@ public abstract class FlowProcessingWithHistorySupportFsm<T extends StateMachine
super(nextEvent, errorEvent, commandContext, carrier, eventListeners);
}
+ protected String[] getFlowIdsForHistory() {
+ return new String[]{getFlowId()};
+ }
+
/**
* Add a history record on the action.
*/
public void saveActionToHistory(String action) {
- log.debug("Flow {} action - {}", getFlowId(), action);
- sendHistoryData(action, null);
+ for (String flowId : getFlowIdsForHistory()) {
+ log.debug("Flow {} action - {}", flowId, action);
+ sendHistoryData(flowId, action, null);
+ }
}
@Override
public void saveActionToHistory(String action, String description) {
- log.debug("Flow {} action - {} : {}", getFlowId(), action, description);
- sendHistoryData(action, description);
+ for (String flowId : getFlowIdsForHistory()) {
+ log.debug("Flow {} action - {} : {}", flowId, action, description);
+ sendHistoryData(flowId, action, description);
+ }
}
/**
@@ -85,26 +93,32 @@ public abstract class FlowProcessingWithHistorySupportFsm<T extends StateMachine
@Override
public void saveErrorToHistory(String action, String errorMessage) {
- log.error("Flow {} error - {} : {}", getFlowId(), action, errorMessage);
- sendHistoryData(action, errorMessage);
+ for (String flowId : getFlowIdsForHistory()) {
+ log.error("Flow {} error - {} : {}", flowId, action, errorMessage);
+ sendHistoryData(flowId, action, errorMessage);
+ }
}
@Override
public void saveErrorToHistory(String errorMessage) {
- log.error("Flow {} error - {}", getFlowId(), errorMessage);
- sendHistoryData(errorMessage, null);
+ for (String flowId : getFlowIdsForHistory()) {
+ log.error("Flow {} error - {}", flowId, errorMessage);
+ sendHistoryData(flowId, errorMessage, null);
+ }
}
/**
* Add a history record on the error.
*/
public void saveErrorToHistory(String errorMessage, Exception ex) {
- log.error("Flow {} error - {}", getFlowId(), errorMessage, ex);
- sendHistoryData(errorMessage, null);
+ for (String flowId : getFlowIdsForHistory()) {
+ log.error("Flow {} error - {}", flowId, errorMessage, ex);
+ sendHistoryData(flowId, errorMessage, null);
+ }
}
- protected void sendHistoryData(String action, String description) {
- sendHistoryData(getFlowId(), action, description, getCommandContext().getCorrelationId());
+ protected void sendHistoryData(String flowId, String action, String description) {
+ sendHistoryData(flowId, action, description, getCommandContext().getCorrelationId());
}
protected void sendHistoryData(String flowId, String action, String description, String taskId) {
@@ -141,7 +155,9 @@ public abstract class FlowProcessingWithHistorySupportFsm<T extends StateMachine
public void saveNewEventToHistory(String action, FlowEventData.Event event,
FlowEventData.Initiator initiator,
String details) {
- saveNewEventToHistory(getFlowId(), action, event, initiator, details, getCommandContext().getCorrelationId());
+ for (String flowId : getFlowIdsForHistory()) {
+ saveNewEventToHistory(flowId, action, event, initiator, details, getCommandContext().getCorrelationId());
+ }
}
/**
@@ -176,19 +192,21 @@ public abstract class FlowProcessingWithHistorySupportFsm<T extends StateMachine
*/
public void saveActionWithDumpToHistory(String action, String description,
FlowDumpData flowDumpData) {
- log.debug("Flow {} action - {} : {}", getFlowId(), action, description);
-
- FlowHistoryHolder historyHolder = FlowHistoryHolder.builder()
- .taskId(getCommandContext().getCorrelationId())
- .flowDumpData(flowDumpData)
- .flowHistoryData(FlowHistoryData.builder()
- .action(action)
- .time(getNextHistoryEntryTime())
- .description(description)
- .flowId(getFlowId())
- .build())
- .build();
- getCarrier().sendHistoryUpdate(historyHolder);
+ for (String flowId : getFlowIdsForHistory()) {
+ log.debug("Flow {} action - {} : {}", flowId, action, description);
+
+ FlowHistoryHolder historyHolder = FlowHistoryHolder.builder()
+ .taskId(getCommandContext().getCorrelationId())
+ .flowDumpData(flowDumpData)
+ .flowHistoryData(FlowHistoryData.builder()
+ .action(action)
+ .time(getNextHistoryEntryTime())
+ .description(description)
+ .flowId(flowId)
+ .build())
+ .build();
+ getCarrier().sendHistoryUpdate(historyHolder);
+ }
}
public final Instant getNextHistoryEntryTime() {
@@ -205,8 +223,10 @@ public abstract class FlowProcessingWithHistorySupportFsm<T extends StateMachine
}
public void saveGlobalTimeoutToHistory() {
- saveErrorToHistory(String.format(
- "Global timeout reached for %s operation on flow \\"%s\\"", getCrudActionName(), getFlowId()));
+ for (String flowId : getFlowIdsForHistory()) {
+ saveErrorToHistory(String.format(
+ "Global timeout reached for %s operation on flow \\"%s\\"", getCrudActionName(), flowId));
+ }
}
protected abstract String getCrudActionName();
diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/FlowSwapEndpointsFsm.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/FlowSwapEndpointsFsm.java
index 85b49d9c7..80460c47e 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/FlowSwapEndpointsFsm.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/FlowSwapEndpointsFsm.java
@@ -84,6 +84,11 @@ public class FlowSwapEndpointsFsm extends FlowProcessingWithHistorySupportFsm<Fl
throw new UnsupportedOperationException("Not implemented for swap flow endpoints operation. Skipping");
}
+ @Override
+ protected String[] getFlowIdsForHistory() {
+ return new String[]{firstFlowId, secondFlowId};
+ }
+
@Override
protected String getCrudActionName() {
return "swap-endpoints";
diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/actions/ValidateFlowsAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/actions/ValidateFlowsAction.java
index ab3211b19..7fe710184 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/actions/ValidateFlowsAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/actions/ValidateFlowsAction.java
@@ -58,7 +58,9 @@ public class ValidateFlowsAction
RequestedFlow secondTargetFlow = stateMachine.getSecondTargetFlow();
if (!featureTogglesRepository.getOrDefault().getUpdateFlowEnabled()) {
- throw new FlowProcessingException(ErrorType.NOT_PERMITTED, "Flow update feature is disabled");
+ stateMachine.fireValidationError(
+ new ErrorData(ErrorType.NOT_PERMITTED, FlowSwapEndpointsFsm.GENERIC_ERROR_MESSAGE,
+ "Flow update feature is disabled"));
}
try { | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowProcessingWithHistorySupportFsm.java', 'src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/FlowSwapEndpointsFsm.java', 'src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/swapendpoints/actions/ValidateFlowsAction.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 11,184,813 | 2,334,663 | 286,564 | 3,054 | 4,560 | 894 | 85 | 3 | 264 | 34 | 63 | 10 | 0 | 0 | 1970-01-01T00:27:37 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,063 | telstra/open-kilda/4124/4141 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4141 | https://github.com/telstra/open-kilda/pull/4124 | https://github.com/telstra/open-kilda/pull/4124 | 1 | closes | Incorrect WARN messages `violates latency SLA` | Messages like
`Forward path for flow 'someflow' violates latency SLA 1000/0 ms` shouldn't be logged if maxLatency for flow is 0.
Also latency in log is incorrect. it must be in milliseconds but in fact it is in microseconds | f75c9d68ede317f9d06529ac0cba49640417f97c | 287211bd4712a1344cfae1b76cf1f7dd74fade57 | https://github.com/telstra/open-kilda/compare/f75c9d68ede317f9d06529ac0cba49640417f97c...287211bd4712a1344cfae1b76cf1f7dd74fade57 | diff --git a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/service/ActionService.java b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/service/ActionService.java
index 6b2918ab7..3163453c7 100644
--- a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/service/ActionService.java
+++ b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/service/ActionService.java
@@ -19,6 +19,8 @@ import org.openkilda.server42.messaging.FlowDirection;
import lombok.extern.slf4j.Slf4j;
+import java.util.concurrent.TimeUnit;
+
@Slf4j
public class ActionService {
@@ -27,9 +29,9 @@ public class ActionService {
*/
public void checkFlowSla(String flowId, FlowDirection direction, long latency,
Long maxLatency, Long maxLatencyTier2) {
- if (maxLatency != null && latency > maxLatency) {
- long latencyMs = latency / 1000;
- long maxLatencyMs = maxLatency / 1000;
+ if (maxLatency != null && maxLatency > 0 && latency > maxLatency) {
+ long latencyMs = TimeUnit.NANOSECONDS.toMillis(latency);
+ long maxLatencyMs = TimeUnit.NANOSECONDS.toMillis(maxLatency);
log.warn("{} path for flow '{}' violates latency SLA {}/{} ms",
direction.name(), flowId, latencyMs, maxLatencyMs);
} | ['src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/service/ActionService.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 8,399,709 | 1,753,986 | 219,898 | 2,365 | 420 | 104 | 8 | 1 | 227 | 39 | 57 | 3 | 0 | 0 | 1970-01-01T00:26:55 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,047 | telstra/open-kilda/4977/4976 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4976 | https://github.com/telstra/open-kilda/pull/4977 | https://github.com/telstra/open-kilda/pull/4977 | 1 | closes | Create LAG without `lacp_reply` in request causes 500 response | **Steps to reproduce:**
1. send request `POST /api/v2/switches/{switch_id}/lags` with body `{ "port_numbers": [12, 14]}` (without `lacp_reply`)
**Expected result:**
1. lacp_reply will be set to true by default
2. LAG will be created
**Actual result:**
500 Internal server error | 053438afae1b25a9fccf3492dcac22fcaa3bf4bf | f613b32c45986329ef0849697b2633965e01806c | https://github.com/telstra/open-kilda/compare/053438afae1b25a9fccf3492dcac22fcaa3bf4bf...f613b32c45986329ef0849697b2633965e01806c | diff --git a/src-java/northbound-service/northbound-api/src/main/java/org/openkilda/northbound/dto/v2/switches/LagPortRequest.java b/src-java/northbound-service/northbound-api/src/main/java/org/openkilda/northbound/dto/v2/switches/LagPortRequest.java
index 8d4362bd6..0c278e745 100644
--- a/src-java/northbound-service/northbound-api/src/main/java/org/openkilda/northbound/dto/v2/switches/LagPortRequest.java
+++ b/src-java/northbound-service/northbound-api/src/main/java/org/openkilda/northbound/dto/v2/switches/LagPortRequest.java
@@ -15,11 +15,9 @@
package org.openkilda.northbound.dto.v2.switches;
-import com.fasterxml.jackson.databind.PropertyNamingStrategy;
-import com.fasterxml.jackson.databind.annotation.JsonNaming;
-import lombok.AllArgsConstructor;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Builder;
-import lombok.Builder.Default;
import lombok.Data;
import lombok.NoArgsConstructor;
@@ -27,11 +25,23 @@ import java.util.Set;
@Data
@NoArgsConstructor
-@AllArgsConstructor
-@JsonNaming(PropertyNamingStrategy.SnakeCaseStrategy.class)
-@Builder
public class LagPortRequest {
+ @JsonProperty("port_numbers")
private Set<Integer> portNumbers;
- @Default
- private Boolean lacpReply = true;
+
+ @JsonProperty("lacp_reply")
+ private Boolean lacpReply;
+
+ @Builder
+ @JsonCreator
+ public LagPortRequest(
+ @JsonProperty("port_numbers") Set<Integer> portNumbers,
+ @JsonProperty("lacp_reply") Boolean lacpReply) {
+ this.portNumbers = portNumbers;
+ setLacpReply(lacpReply);
+ }
+
+ public void setLacpReply(Boolean lacpReply) {
+ this.lacpReply = lacpReply == null ? true : lacpReply;
+ }
} | ['src-java/northbound-service/northbound-api/src/main/java/org/openkilda/northbound/dto/v2/switches/LagPortRequest.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 11,321,323 | 2,363,586 | 289,817 | 3,081 | 946 | 191 | 28 | 1 | 288 | 38 | 82 | 9 | 0 | 0 | 1970-01-01T00:27:47 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,060 | telstra/open-kilda/4252/4251 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4251 | https://github.com/telstra/open-kilda/pull/4252 | https://github.com/telstra/open-kilda/pull/4252 | 1 | closes | Kilda wouldn't reinstall meter with incorrect burst size | **Steps to reproduce (for local env):**
1. Deploy Kilda with any topology
2. Connect to lab_service container `docker exec -it lab_service-1 /bin/bash`
3. Remove broadcast rule `ovs-ofctl -O OpenFlow13 --strict del-flows ofsw2 priority=31768,udp,dl_dst=00:26:e1:ff:ff:ff,tp_dst=61231`
4. Modify broadcast meter `ovs-ofctl -O OpenFlow13 mod-meter ofsw2 meter=2,pktps,burst,stats,bands=type=drop,rate=200,burst_size=5000`
5. Validate switch. You will get 1 missing rule and 1 misconfigured meter in response. (expected bust size 4096, actual - 5000)
6. Synchronize switch
7. Validate switch
**Expected result:**
0 missing rules and 0 misconfigured rules
**Actual result:**
0 missing rules, 1 misconfigured rules with incorrect burst size.
**Reson:**
During rule installation Kilda checks existing meters. If meter with needed ID exists Kilda checks meter parameters: rate and flags (but **not** burst size). If parameters are correct - Kilda just skips meter step. If parameters are different - Kilda removes old meter and installs new one. To fix the problem Kilda must check burst size too.
User can face with this problem during switch flapping. Some times after flapping we got incorrect burst sizes for system meters and Kilda just skips fixing of such meters during switch init synchronization. | 1d1a98cebd8993258844de6aa5630995be6f4332 | 877e435ff88e3c9358383fb01f8bc234c181cc52 | https://github.com/telstra/open-kilda/compare/1d1a98cebd8993258844de6aa5630995be6f4332...877e435ff88e3c9358383fb01f8bc234c181cc52 | diff --git a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/switchmanager/SwitchManager.java b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/switchmanager/SwitchManager.java
index 28ee1be5d..49406e77d 100644
--- a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/switchmanager/SwitchManager.java
+++ b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/switchmanager/SwitchManager.java
@@ -2267,35 +2267,39 @@ public class SwitchManager implements IFloodlightModule, IFloodlightService, ISw
@VisibleForTesting
void processMeter(IOFSwitch sw, OFMeterMod meterMod) {
long meterId = meterMod.getMeterId();
- OFMeterConfig meterConfig;
+ OFMeterConfig actualMeterConfig;
try {
- meterConfig = getMeter(sw.getId(), meterId);
+ actualMeterConfig = getMeter(sw.getId(), meterId);
} catch (SwitchOperationException e) {
logger.warn("Meter {} won't be installed on the switch {}: {}", meterId, sw.getId(), e.getMessage());
return;
}
- OFMeterBandDrop meterBandDrop = Optional.ofNullable(meterConfig)
+ OFMeterBandDrop actualMeterBandDrop = Optional.ofNullable(actualMeterConfig)
.map(OFMeterConfig::getEntries)
.flatMap(entries -> entries.stream().findFirst())
.map(OFMeterBandDrop.class::cast)
.orElse(null);
try {
- OFMeterBandDrop ofMeterBandDrop = sw.getOFFactory().getVersion().compareTo(OF_13) > 0
+ OFMeterBandDrop expectedMeterBandDrop = sw.getOFFactory().getVersion().compareTo(OF_13) > 0
? (OFMeterBandDrop) meterMod.getBands().get(0) : (OFMeterBandDrop) meterMod.getMeters().get(0);
- long rate = ofMeterBandDrop.getRate();
- Set<OFMeterFlags> flags = meterMod.getFlags();
+ long expectedRate = expectedMeterBandDrop.getRate();
+ long expectedBurstSize = expectedMeterBandDrop.getBurstSize();
+ Set<OFMeterFlags> expectedFlags = meterMod.getFlags();
- if (meterBandDrop != null && meterBandDrop.getRate() == rate
- && CollectionUtils.isEqualCollection(meterConfig.getFlags(), flags)) {
+ if (actualMeterBandDrop != null && actualMeterBandDrop.getRate() == expectedRate
+ && actualMeterBandDrop.getBurstSize() == expectedBurstSize
+ && CollectionUtils.isEqualCollection(actualMeterConfig.getFlags(), expectedFlags)) {
logger.debug("Meter {} won't be reinstalled on switch {}. It already exists", meterId, sw.getId());
return;
}
- if (meterBandDrop != null) {
- logger.info("Meter {} with origin rate {} will be reinstalled on {} switch.",
- meterId, sw.getId(), meterBandDrop.getRate());
+ if (actualMeterBandDrop != null) {
+ logger.info("Meter {} on switch {} has rate={}, burst size={} and flags={} but it must have "
+ + "rate={}, burst size={} and flags={}. Meter will be reinstalled.",
+ meterId, sw.getId(), actualMeterBandDrop.getRate(), actualMeterBandDrop.getBurstSize(),
+ actualMeterConfig.getFlags(), expectedRate, expectedBurstSize, expectedFlags);
buildAndDeleteMeter(sw, sw.getId(), meterId);
sendBarrierRequest(sw);
}
diff --git a/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/switchmanager/SwitchManagerTest.java b/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/switchmanager/SwitchManagerTest.java
index 5f612baa3..70d985e48 100644
--- a/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/switchmanager/SwitchManagerTest.java
+++ b/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/switchmanager/SwitchManagerTest.java
@@ -282,7 +282,7 @@ public class SwitchManagerTest {
public void runInstallVerificationBroadcastRule(boolean supportsUdpPortMatch) throws Exception {
mockGetGroupsRequest(ImmutableList.of(GroupId.ROUND_TRIP_LATENCY_GROUP_ID.intValue()));
- mockGetMetersRequest(ImmutableList.of(meterId), true, 10L);
+ mockGetMetersRequest(ImmutableList.of(meterId), true, 10L, 100);
mockFlowStatsRequest(VERIFICATION_BROADCAST_RULE_COOKIE);
mockBarrierRequest();
@@ -316,7 +316,7 @@ public class SwitchManagerTest {
@Test
public void installVerificationUnicastRule() throws Exception {
- mockGetMetersRequest(Lists.newArrayList(broadcastMeterId), true, 10L);
+ mockGetMetersRequest(Lists.newArrayList(broadcastMeterId), true, 10L, 100);
mockBarrierRequest();
expect(iofSwitch.write(anyObject(OFMeterMod.class))).andReturn(true).times(1);
Capture<OFFlowMod> capture = prepareForInstallTest();
@@ -423,7 +423,7 @@ public class SwitchManagerTest {
@Test
public void installUnicastVerificationRuleVxlan() throws Exception {
- mockGetMetersRequest(Lists.newArrayList(broadcastMeterId), true, 10L);
+ mockGetMetersRequest(Lists.newArrayList(broadcastMeterId), true, 10L, 100);
mockBarrierRequest();
expect(iofSwitch.write(anyObject(OFMeterMod.class))).andReturn(true).times(1);
Capture<OFFlowMod> capture = prepareForInstallTest();
@@ -661,7 +661,6 @@ public class SwitchManagerTest {
mockBarrierRequest();
mockFlowStatsRequest(cookie);
- mockGetMetersRequest(Collections.emptyList(), true, 0);
replay(ofSwitchService, iofSwitch, switchDescription);
@@ -1064,7 +1063,7 @@ public class SwitchManagerTest {
// define that switch is Centec
expect(switchDescription.getManufacturerDescription()).andStubReturn("Centec Inc.");
expect(featureDetectorService.detectSwitch(iofSwitch)).andStubReturn(Sets.newHashSet(METERS));
- mockGetMetersRequest(Collections.emptyList(), true, 0);
+ mockGetMetersRequest(Collections.emptyList(), true, 0, 0);
mockBarrierRequest();
Capture<OFMeterMod> capture = EasyMock.newCapture(CaptureType.ALL);
@@ -1105,7 +1104,8 @@ public class SwitchManagerTest {
expect(switchDescription.getManufacturerDescription()).andStubReturn("Centec Inc.");
expect(featureDetectorService.detectSwitch(iofSwitch)).andStubReturn(Sets.newHashSet(METERS));
mockBarrierRequest();
- mockGetMetersRequest(Lists.newArrayList(unicastMeterId), false, expectedRate);
+ mockGetMetersRequest(Lists.newArrayList(unicastMeterId), false, expectedRate,
+ config.getSystemMeterBurstSizeInPackets());
Capture<OFMeterMod> capture = EasyMock.newCapture(CaptureType.ALL);
expect(iofSwitch.write(capture(capture))).andReturn(true).times(2);
@@ -1113,7 +1113,7 @@ public class SwitchManagerTest {
replay(ofSwitchService, iofSwitch, switchDescription, featureDetectorService);
// when
- Set<OFMeterFlags> flags = ImmutableSet.of(OFMeterFlags.KBPS, OFMeterFlags.STATS, OFMeterFlags.BURST);
+ Set<OFMeterFlags> flags = ImmutableSet.of(OFMeterFlags.KBPS, OFMeterFlags.BURST);
OFMeterMod ofMeterMod = buildMeterMod(iofSwitch.getOFFactory(), expectedRate,
config.getSystemMeterBurstSizeInPackets(), unicastMeterId, flags);
switchManager.processMeter(iofSwitch, ofMeterMod);
@@ -1148,7 +1148,8 @@ public class SwitchManagerTest {
expect(iofSwitch.write(capture(capture))).andReturn(true).times(2);
mockBarrierRequest();
- mockGetMetersRequest(Lists.newArrayList(unicastMeter), true, originRate);
+ mockGetMetersRequest(Lists.newArrayList(unicastMeter), true, originRate,
+ config.getSystemMeterBurstSizeInPackets());
replay(ofSwitchService, iofSwitch, switchDescription, featureDetectorService);
//when
@@ -1168,6 +1169,45 @@ public class SwitchManagerTest {
assertThat(actual.get(1), hasProperty("flags", containsInAnyOrder(flags.toArray())));
}
+ @Test
+ public void shouldRenstallMetersIfBurstSizeIsUpdated() throws Exception {
+ long unicastMeter = createMeterIdForDefaultRule(VERIFICATION_UNICAST_RULE_COOKIE).getValue();
+ long originBurstSize = config.getSystemMeterBurstSizeInPackets();
+ long updatedBurstSize = config.getSystemMeterBurstSizeInPackets() + 10;
+
+ // given
+ expect(ofSwitchService.getActiveSwitch(dpid)).andStubReturn(iofSwitch);
+ expect(iofSwitch.getOFFactory()).andStubReturn(ofFactory);
+ expect(iofSwitch.getSwitchDescription()).andStubReturn(switchDescription);
+ expect(iofSwitch.getId()).andStubReturn(dpid);
+ expect(switchDescription.getManufacturerDescription()).andStubReturn(StringUtils.EMPTY);
+ expect(featureDetectorService.detectSwitch(iofSwitch)).andStubReturn(Sets.newHashSet(PKTPS_FLAG));
+ Capture<OFMeterMod> capture = EasyMock.newCapture(CaptureType.ALL);
+ // 1 meter deletion + 1 meters installation
+ expect(iofSwitch.write(capture(capture))).andReturn(true).times(2);
+
+ mockBarrierRequest();
+ mockGetMetersRequest(Lists.newArrayList(unicastMeter), true, config.getUnicastRateLimit(),
+ originBurstSize);
+ replay(ofSwitchService, iofSwitch, switchDescription, featureDetectorService);
+
+ //when
+ Set<OFMeterFlags> flags = ImmutableSet.of(OFMeterFlags.PKTPS, OFMeterFlags.STATS, OFMeterFlags.BURST);
+ OFMeterMod ofMeterMod = buildMeterMod(iofSwitch.getOFFactory(), config.getUnicastRateLimit(),
+ updatedBurstSize, unicastMeter, flags);
+ switchManager.processMeter(iofSwitch, ofMeterMod);
+
+ final List<OFMeterMod> actual = capture.getValues();
+ assertEquals(2, actual.size());
+
+ // verify meters deletion
+ assertThat(actual.get(0), hasProperty("command", equalTo(OFMeterModCommand.DELETE)));
+ // verify meter installation
+ assertThat(actual.get(1), hasProperty("command", equalTo(OFMeterModCommand.ADD)));
+ assertThat(actual.get(1), hasProperty("meterId", equalTo(unicastMeter)));
+ assertThat(actual.get(1), hasProperty("flags", containsInAnyOrder(flags.toArray())));
+ }
+
@Test
public void shouldNotInstallMetersIfAlreadyExists() throws Exception {
long expectedRate = config.getBroadcastRateLimit();
@@ -1185,7 +1225,8 @@ public class SwitchManagerTest {
.andReturn(Sets.newHashSet(GROUP_PACKET_OUT_CONTROLLER, NOVIFLOW_COPY_FIELD, PKTPS_FLAG))
.times(8);
mockBarrierRequest();
- mockGetMetersRequest(Lists.newArrayList(unicastMeterId, broadcastMeterId), true, expectedRate);
+ mockGetMetersRequest(Lists.newArrayList(unicastMeterId, broadcastMeterId), true, expectedRate,
+ config.getBroadcastRateLimit());
mockGetGroupsRequest(Lists.newArrayList(GroupId.ROUND_TRIP_LATENCY_GROUP_ID.intValue()));
replay(ofSwitchService, iofSwitch, switchDescription, featureDetectorService);
@@ -1309,17 +1350,19 @@ public class SwitchManagerTest {
return capture;
}
- private void mockGetMetersRequest(List<Long> meterIds, boolean supportsPkts, long rate) throws Exception {
+ private void mockGetMetersRequest(List<Long> meterIds, boolean supportsPkts, long rate, long burstSize)
+ throws Exception {
List<OFMeterConfig> meterConfigs = new ArrayList<>(meterIds.size());
for (Long meterId : meterIds) {
OFMeterBandDrop bandDrop = mock(OFMeterBandDrop.class);
expect(bandDrop.getRate()).andStubReturn(rate);
+ expect(bandDrop.getBurstSize()).andStubReturn(burstSize);
OFMeterConfig meterConfig = mock(OFMeterConfig.class);
expect(meterConfig.getEntries()).andStubReturn(Collections.singletonList(bandDrop));
expect(meterConfig.getMeterId()).andStubReturn(meterId);
- Set<OFMeterFlags> flags = ImmutableSet.of(OFMeterFlags.STATS,
+ Set<OFMeterFlags> flags = ImmutableSet.of(OFMeterFlags.STATS, OFMeterFlags.BURST,
supportsPkts ? OFMeterFlags.PKTPS : OFMeterFlags.KBPS);
expect(meterConfig.getFlags()).andStubReturn(flags);
replay(bandDrop, meterConfig); | ['src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/switchmanager/SwitchManager.java', 'src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/switchmanager/SwitchManagerTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 9,043,224 | 1,883,640 | 234,767 | 2,523 | 2,018 | 419 | 26 | 1 | 1,322 | 183 | 332 | 19 | 0 | 0 | 1970-01-01T00:27:00 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,052 | telstra/open-kilda/4859/4810 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4810 | https://github.com/telstra/open-kilda/pull/4859 | https://github.com/telstra/open-kilda/pull/4859 | 1 | close | flows are not rerouted by flowMonitoring in case main path does not satisfy SLA (shrad_count > 1) | **Steps to reproduce:**
1. update `confd/vars/main.yaml`
```
kilda_flow_sla_check_shard_count: 2
```
2. build and start env with updated value;
3. create 3 the same flows with the `max_latency` strategy;
4. make sure that the path of all 3 flows are the same;
5. update path ( path does not satisfy SLA)
**Expected result:**
All 3 flows are rerouted due to `"details": "Reason: Flow latency become healthy",`
**Actual result:**
sometimes only one flow is rerouted; the rest of the flows are untouched
**NOTE:**
The issue can be reproduced by autotest `def "Able to detect and reroute a flow with MAX_LATENCY strategy when main path does not satisfy latency SLA"() {` from the `test/repr_issue_with_fl_monitoring_shard_count` branch (pr #4811 ) | 344551a65964525809a3b2ef61c1764f31d72875 | 044f47cbc9478eb007ba2cdd270783c18c1b0c5d | https://github.com/telstra/open-kilda/compare/344551a65964525809a3b2ef61c1764f31d72875...044f47cbc9478eb007ba2cdd270783c18c1b0c5d | diff --git a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/fsm/FlowLatencyMonitoringFsm.java b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/fsm/FlowLatencyMonitoringFsm.java
index 3157b17b2..0eee8aed5 100644
--- a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/fsm/FlowLatencyMonitoringFsm.java
+++ b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/fsm/FlowLatencyMonitoringFsm.java
@@ -125,7 +125,19 @@ public class FlowLatencyMonitoringFsm extends AbstractBaseFsm<FlowLatencyMonitor
}
public void enterUnstable(State from, State to, Event event, Context context) {
- saveLastEventInfo(lastStableState);
+ switch (event) {
+ case HEALTHY:
+ saveLastEventInfo(HEALTHY);
+ break;
+ case TIER_1_FAILED:
+ saveLastEventInfo(TIER_1_FAILED);
+ break;
+ case TIER_2_FAILED:
+ saveLastEventInfo(TIER_2_FAILED);
+ break;
+ default:
+ throw new IllegalArgumentException(format("Wrong event type '%s' for unstable state.", event));
+ }
}
public void saveHealthyEventInfo(State from, State to, Event event, Context context) {
@@ -156,13 +168,17 @@ public class FlowLatencyMonitoringFsm extends AbstractBaseFsm<FlowLatencyMonitor
}
public void sendFlowSyncRequest(State from, State to, Event event, Context context) {
- log.info("Flow {} {} latency moved to healthy.", flowId, direction);
- context.getCarrier().sendFlowSyncRequest(flowId);
+ if (lastStableState != HEALTHY) {
+ log.info("Flow {} {} latency moved to healthy.", flowId, direction);
+ context.getCarrier().sendFlowSyncRequest(flowId);
+ }
}
public void sendFlowRerouteRequest(State from, State to, Event event, Context context) {
- log.info("Flow {} {} latency moved to unhealthy.", flowId, direction);
- context.getCarrier().sendFlowRerouteRequest(flowId);
+ if (lastStableState != TIER_1_FAILED && lastStableState != TIER_2_FAILED) {
+ log.info("Flow {} {} latency moved to unhealthy.", flowId, direction);
+ context.getCarrier().sendFlowRerouteRequest(flowId);
+ }
}
public void saveTier2FailedEventInfo(State from, State to, Event event, Context context) {
@@ -296,10 +312,6 @@ public class FlowLatencyMonitoringFsm extends AbstractBaseFsm<FlowLatencyMonitor
return maxLatencyTier2;
}
- public State getLastStableState() {
- return lastStableState;
- }
-
@Value
@Builder
public static class Context { | ['src-java/flowmonitoring-topology/flowmonitoring-storm-topology/src/main/java/org/openkilda/wfm/topology/flowmonitoring/fsm/FlowLatencyMonitoringFsm.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 11,179,634 | 2,333,550 | 286,431 | 3,052 | 1,335 | 281 | 30 | 1 | 762 | 113 | 199 | 21 | 0 | 1 | 1970-01-01T00:27:36 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,054 | telstra/open-kilda/4839/4838 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4838 | https://github.com/telstra/open-kilda/pull/4839 | https://github.com/telstra/open-kilda/pull/4839 | 1 | close | Switch Sync timeout if switch has 2 default rules with same cookie | **Steps to reproduce:**
1. Create an excess default rule with an existing default cookie.
OVS:
```
docker exec -it lab_service-1 bash
ovs-ofctl -O OpenFlow13 add-flow ofsw2 cookie=0x8000000000000002,priority=1,in_port=11,dl_src=00:05:95:41:ec:8c/ff:ff:ff:ff:ff:ff,actions=drop
```
2. validate switch
3. sync switch
**Expected result:**
(Optionally) excess rule after validation
Switch sync removes the rule
**Actual result:**
misconfigured rule
Switch sync timeout | 963619ef04bc826080d2ad92d0981ffb3478e4cd | 364af1081c314487be55974ec138841fc2fdd46a | https://github.com/telstra/open-kilda/compare/963619ef04bc826080d2ad92d0981ffb3478e4cd...364af1081c314487be55974ec138841fc2fdd46a | diff --git a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/BatchData.java b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/BatchData.java
index ac3c830e1..24dfc5338 100644
--- a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/BatchData.java
+++ b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/BatchData.java
@@ -15,6 +15,8 @@
package org.openkilda.floodlight.command.rulemanager;
+import org.openkilda.rulemanager.SpeakerData;
+
import lombok.Builder;
import lombok.Data;
import org.projectfloodlight.openflow.protocol.OFMessage;
@@ -26,5 +28,6 @@ public class BatchData {
private boolean group;
private boolean flow;
private OFMessage message;
+ private SpeakerData origin;
private boolean presenceBeVerified;
}
diff --git a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchExecutor.java b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchExecutor.java
index 414c7ba77..a38f28041 100644
--- a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchExecutor.java
+++ b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchExecutor.java
@@ -43,16 +43,15 @@ import org.projectfloodlight.openflow.protocol.OFMessage;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
@Slf4j
public class OfBatchExecutor {
@@ -106,7 +105,7 @@ public class OfBatchExecutor {
public void executeBatch() {
log.debug("Execute batch start (key={})", kafkaKey);
List<UUID> stageCommandsUuids = holder.getCurrentStage();
- Map<SpeakerData, OFMessage> stageMessages = new HashMap<>();
+ List<BatchData> stageMessages = new ArrayList<>();
for (UUID uuid : stageCommandsUuids) {
if (holder.canExecute(uuid)) {
BatchData batchData = holder.getByUUid(uuid);
@@ -116,10 +115,7 @@ public class OfBatchExecutor {
hasFlows |= batchData.isFlow();
hasMeters |= batchData.isMeter();
hasGroups |= batchData.isGroup();
- OFMessage previous = stageMessages.put(holder.getSpeakerDataByUUid(uuid), batchData.getMessage());
- if (previous != null) {
- log.warn("Command with uuid {} already processed.", uuid);
- }
+ stageMessages.add(batchData);
} else {
Map<UUID, String> blockingDependencies = holder.getBlockingDependencies(uuid);
if (!blockingDependencies.isEmpty()) {
@@ -137,7 +133,9 @@ public class OfBatchExecutor {
removeAlreadyExists(stageMessages);
}
- Collection<OFMessage> ofMessages = stageMessages.values();
+ Collection<OFMessage> ofMessages = stageMessages.stream()
+ .map(BatchData::getMessage)
+ .collect(Collectors.toList());
List<CompletableFuture<Optional<OFMessage>>> requests = new ArrayList<>();
try (Session session = sessionService.open(messageContext, iofSwitch)) {
for (OFMessage message : ofMessages) {
@@ -175,7 +173,7 @@ public class OfBatchExecutor {
});
}
- private void removeAlreadyExists(Map<SpeakerData, OFMessage> stageMessages) {
+ private void removeAlreadyExists(List<BatchData> stageMessages) {
if (hasMeters) {
meterStats = switchDataProvider.getMeters();
}
@@ -186,15 +184,15 @@ public class OfBatchExecutor {
try {
List<SpeakerData> speakerData = new ArrayList<>(meterStats.get());
speakerData.addAll(groupStats.get());
- Set<SpeakerData> toRemove = new HashSet<>();
- for (Entry<SpeakerData, OFMessage> entry : stageMessages.entrySet()) {
- if (speakerData.contains(entry.getKey())) {
- toRemove.add(entry.getKey());
+ Set<BatchData> toRemove = new HashSet<>();
+ for (BatchData batchData : stageMessages) {
+ if (speakerData.contains(batchData.getOrigin())) {
+ toRemove.add(batchData);
}
}
toRemove.forEach(data -> {
log.debug("OpenFlow entry is already exist. Skipping command {}", data);
- holder.recordSuccessUuid(data.getUuid());
+ holder.recordSuccessUuid(data.getOrigin().getUuid());
stageMessages.remove(data);
});
} catch (ExecutionException | InterruptedException e) {
diff --git a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchHolder.java b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchHolder.java
index f39445a54..b5d430a9e 100644
--- a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchHolder.java
+++ b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchHolder.java
@@ -15,8 +15,6 @@
package org.openkilda.floodlight.command.rulemanager;
-import static java.lang.String.format;
-
import org.openkilda.floodlight.api.request.rulemanager.OfEntityBatch;
import org.openkilda.floodlight.api.response.rulemanager.SpeakerCommandResponse;
import org.openkilda.floodlight.converter.rulemanager.OfFlowConverter;
@@ -30,7 +28,6 @@ import org.openkilda.model.cookie.CookieBase;
import org.openkilda.rulemanager.FlowSpeakerData;
import org.openkilda.rulemanager.GroupSpeakerData;
import org.openkilda.rulemanager.MeterSpeakerData;
-import org.openkilda.rulemanager.SpeakerData;
import lombok.extern.slf4j.Slf4j;
import net.floodlightcontroller.core.internal.IOFSwitchService;
@@ -45,7 +42,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
-import java.util.stream.Stream;
@Slf4j
public class OfBatchHolder implements OfEntityBatch {
@@ -142,17 +138,6 @@ public class OfBatchHolder implements OfEntityBatch {
return commandMap.get(uuid);
}
- /**
- * Get speaker data by UUID.
- */
- public SpeakerData getSpeakerDataByUUid(UUID uuid) {
- return Stream.of(flowsMap.values(), metersMap.values(), groupsMap.values())
- .flatMap(Collection::stream)
- .filter(data -> uuid.equals(data.getUuid()))
- .findFirst()
- .orElseThrow(() -> new IllegalStateException(format("Can't find speaker data with uuid %s", uuid)));
- }
-
public MeterSpeakerData getByMeterId(MeterId meterId) {
return metersMap.get(meterId);
}
@@ -185,7 +170,12 @@ public class OfBatchHolder implements OfEntityBatch {
OFMessage message = OfFlowConverter.INSTANCE.convertInstallFlowCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
flowsMap.put(data.getCookie(), data);
- BatchData batchData = BatchData.builder().flow(true).message(message).presenceBeVerified(true).build();
+ BatchData batchData = BatchData.builder()
+ .flow(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(true)
+ .build();
addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
@@ -195,10 +185,14 @@ public class OfBatchHolder implements OfEntityBatch {
OFFactory factory = iofSwitchService.getSwitch(dpId).getOFFactory();
OFMessage message = OfFlowConverter.INSTANCE.convertModifyFlowCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
- BatchData batchData = BatchData.builder().flow(true).message(message).presenceBeVerified(true).build();
- commandMap.put(data.getUuid(), batchData);
+ BatchData batchData = BatchData.builder()
+ .flow(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(true)
+ .build();
flowsMap.put(data.getCookie(), data);
- executionGraph.add(data.getUuid(), data.getDependsOn());
+ addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
@Override
@@ -208,7 +202,12 @@ public class OfBatchHolder implements OfEntityBatch {
OFMessage message = OfFlowConverter.INSTANCE.convertDeleteFlowCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
flowsMap.put(data.getCookie(), data);
- BatchData batchData = BatchData.builder().flow(true).message(message).presenceBeVerified(false).build();
+ BatchData batchData = BatchData.builder()
+ .flow(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(false)
+ .build();
addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
@@ -219,7 +218,12 @@ public class OfBatchHolder implements OfEntityBatch {
OFMessage message = OfMeterConverter.INSTANCE.convertInstallMeterCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
metersMap.put(data.getMeterId(), data);
- BatchData batchData = BatchData.builder().meter(true).message(message).presenceBeVerified(true).build();
+ BatchData batchData = BatchData.builder()
+ .meter(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(true)
+ .build();
addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
@@ -229,10 +233,14 @@ public class OfBatchHolder implements OfEntityBatch {
OFFactory factory = iofSwitchService.getSwitch(dpId).getOFFactory();
OFMessage message = OfMeterConverter.INSTANCE.convertModifyMeterCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
- BatchData batchData = BatchData.builder().meter(true).message(message).presenceBeVerified(true).build();
- commandMap.put(data.getUuid(), batchData);
+ BatchData batchData = BatchData.builder()
+ .meter(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(true)
+ .build();
metersMap.put(data.getMeterId(), data);
- executionGraph.add(data.getUuid(), data.getDependsOn());
+ addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
@Override
@@ -242,7 +250,12 @@ public class OfBatchHolder implements OfEntityBatch {
OFMessage message = OfMeterConverter.INSTANCE.convertDeleteMeterCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
metersMap.put(data.getMeterId(), data);
- BatchData batchData = BatchData.builder().meter(true).message(message).presenceBeVerified(false).build();
+ BatchData batchData = BatchData.builder()
+ .meter(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(false)
+ .build();
addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
@@ -253,7 +266,12 @@ public class OfBatchHolder implements OfEntityBatch {
OFMessage message = OfGroupConverter.INSTANCE.convertInstallGroupCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
groupsMap.put(data.getGroupId(), data);
- BatchData batchData = BatchData.builder().group(true).message(message).presenceBeVerified(true).build();
+ BatchData batchData = BatchData.builder()
+ .group(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(true)
+ .build();
addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
@@ -263,10 +281,14 @@ public class OfBatchHolder implements OfEntityBatch {
OFFactory factory = iofSwitchService.getSwitch(dpId).getOFFactory();
OFMessage message = OfGroupConverter.INSTANCE.convertModifyGroupCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
- BatchData batchData = BatchData.builder().group(true).message(message).presenceBeVerified(true).build();
+ BatchData batchData = BatchData.builder()
+ .group(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(true)
+ .build();
commandMap.put(data.getUuid(), batchData);
- groupsMap.put(data.getGroupId(), data);
- executionGraph.add(data.getUuid(), data.getDependsOn());
+ addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
@Override
@@ -276,7 +298,12 @@ public class OfBatchHolder implements OfEntityBatch {
OFMessage message = OfGroupConverter.INSTANCE.convertDeleteGroupCommand(data, factory);
xidMapping.put(message.getXid(), data.getUuid());
groupsMap.put(data.getGroupId(), data);
- BatchData batchData = BatchData.builder().group(true).message(message).presenceBeVerified(false).build();
+ BatchData batchData = BatchData.builder()
+ .group(true)
+ .message(message)
+ .origin(data)
+ .presenceBeVerified(false)
+ .build();
addBatchData(data.getUuid(), batchData, data.getDependsOn());
}
| ['src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchHolder.java', 'src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/OfBatchExecutor.java', 'src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/command/rulemanager/BatchData.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 11,446,574 | 2,390,825 | 291,844 | 3,072 | 5,723 | 1,067 | 116 | 3 | 478 | 51 | 143 | 17 | 0 | 1 | 1970-01-01T00:27:34 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,055 | telstra/open-kilda/4812/4805 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4805 | https://github.com/telstra/open-kilda/pull/4812 | https://github.com/telstra/open-kilda/pull/4812 | 1 | close | Switch sync unable to handle misconfigured rules with the same cookie | Steps to reproduce:
1. Create two rules with the same cookie. One correct and one misconfigured.
2. Run switch sync on the switch.
Actual result:
Misconfigured rule is still on the switch.
Expected result:
No misconfigured rules on the switch. | e107f48b89025669a439cfaf16b37128fbe65cb7 | debecbc971d9d5f3afbcb32dd5f3417507c9e6ea | https://github.com/telstra/open-kilda/compare/e107f48b89025669a439cfaf16b37128fbe65cb7...debecbc971d9d5f3afbcb32dd5f3417507c9e6ea | diff --git a/src-java/swmanager-topology/swmanager-storm-topology/src/main/java/org/openkilda/wfm/topology/switchmanager/fsm/SwitchSyncFsm.java b/src-java/swmanager-topology/swmanager-storm-topology/src/main/java/org/openkilda/wfm/topology/switchmanager/fsm/SwitchSyncFsm.java
index c77cbea48..3c04b53c0 100644
--- a/src-java/swmanager-topology/swmanager-storm-topology/src/main/java/org/openkilda/wfm/topology/switchmanager/fsm/SwitchSyncFsm.java
+++ b/src-java/swmanager-topology/swmanager-storm-topology/src/main/java/org/openkilda/wfm/topology/switchmanager/fsm/SwitchSyncFsm.java
@@ -87,6 +87,7 @@ import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
+import java.util.stream.Stream;
@Slf4j
public class SwitchSyncFsm extends AbstractBaseFsm<SwitchSyncFsm, SwitchSyncState, SwitchSyncEvent, Object> {
@@ -261,7 +262,7 @@ public class SwitchSyncFsm extends AbstractBaseFsm<SwitchSyncFsm, SwitchSyncStat
log.info("Compute reinstall rules (switch={}, key={})", switchId, key);
try {
List<FlowSpeakerData> misconfiguredRulesToRemove = reinstalledRulesCookies.stream()
- .map(this::findActualFlowByCookie)
+ .flatMap(this::findActualFlowsByCookie)
.collect(Collectors.toList());
toRemove.addAll(misconfiguredRulesToRemove);
List<FlowSpeakerData> misconfiguredRulesToInstall = validationResult.getExpectedEntries().stream()
@@ -284,7 +285,7 @@ public class SwitchSyncFsm extends AbstractBaseFsm<SwitchSyncFsm, SwitchSyncStat
log.info("Compute remove rules (switch={}, key={})", switchId, key);
try {
List<FlowSpeakerData> excessRules = removedFlowRulesCookies.stream()
- .map(this::findActualFlowByCookie)
+ .flatMap(this::findActualFlowsByCookie)
.collect(Collectors.toList());
toRemove.addAll(excessRules);
} catch (Exception e) {
@@ -357,12 +358,11 @@ public class SwitchSyncFsm extends AbstractBaseFsm<SwitchSyncFsm, SwitchSyncStat
}
}
- private FlowSpeakerData findActualFlowByCookie(long cookie) {
+ private Stream<FlowSpeakerData> findActualFlowsByCookie(long cookie) {
return validationResult.getActualFlows().stream()
.filter(flowSpeakerData -> flowSpeakerData.getCookie().getValue() == cookie)
- .findFirst()
- .orElseThrow(() -> new IllegalStateException(
- format("Actual rule with cookie %s not found", cookie)));
+ .collect(Collectors.toList())
+ .stream();
}
private MeterSpeakerData findActualMeterById(long meterId) { | ['src-java/swmanager-topology/swmanager-storm-topology/src/main/java/org/openkilda/wfm/topology/switchmanager/fsm/SwitchSyncFsm.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 11,523,958 | 2,406,634 | 293,523 | 3,083 | 676 | 121 | 12 | 1 | 253 | 40 | 53 | 9 | 0 | 0 | 1970-01-01T00:27:32 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,056 | telstra/open-kilda/4767/4606 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4606 | https://github.com/telstra/open-kilda/pull/4767 | https://github.com/telstra/open-kilda/pull/4767 | 1 | close | [flowHistory] incorrect fowrawd/reverse Paths fields in beforeState section after updating flow endpoint | **Steps to reproduce:**
1. create a multi-switch flow `(00:00:00:00:00:00:00:02_7 - 00:00:00:00:00:00:00:03_8)`
2. update the flow (for example make single-switch flow `(00:00:00:00:00:00:00:02_7 - 00:00:00:00:00:00:00:02_8)`)
3. check an `update` action in flow history
**Expected result:**
The forward/reverse paths fields are correct.
```
{"type": "stateBefore",
...
"forwardPath": "[{\\"switch_id\\":\\"00:00:00:00:00:00:00:02\\",\\"input_port\\":7,\\"output_port\\":34},{\\"switch_id\\":\\"00:00:00:00:00:00:00:03\\",\\"input_port\\":2,\\"output_port\\":8}]",
"reversePath": "[{\\"switch_id\\":\\"00:00:00:00:00:00:00:03\\",\\"input_port\\":8,\\"output_port\\":2},{\\"switch_id\\":\\"00:00:00:00:00:00:00:02\\",\\"input_port\\":34,\\"output_port\\":7}]",
...}
```
**Actual result:**
The forward/reverse paths fields are incorrect. wrong dst swId is set.
```
{ "type": "stateBefore",
...
"forwardPath": "[{\\"switch_id\\":\\"00:00:00:00:00:00:00:02\\",\\"input_port\\":7,\\"output_port\\":34},{\\"switch_id\\":\\"00:00:00:00:00:00:00:02\\",\\"input_port\\":2,\\"output_port\\":8}]",
"reversePath": "[{\\"switch_id\\":\\"00:00:00:00:00:00:00:02\\",\\"input_port\\":8,\\"output_port\\":2},{\\"switch_id\\":\\"00:00:00:00:00:00:00:02\\",\\"input_port\\":34,\\"output_port\\":7}]",
...}
``` | 144c7d4e37ed8c688a857bb577cee5a43b96caa4 | 22b6dc278b6900edac9764685f1154687095a96e | https://github.com/telstra/open-kilda/compare/144c7d4e37ed8c688a857bb577cee5a43b96caa4...22b6dc278b6900edac9764685f1154687095a96e | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/CompleteFlowPathRemovalAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/CompleteFlowPathRemovalAction.java
index afa5b107e..8e782223d 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/CompleteFlowPathRemovalAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/CompleteFlowPathRemovalAction.java
@@ -74,6 +74,7 @@ public class CompleteFlowPathRemovalAction extends
updateIslsForFlowPath(removedPaths.getReverse());
}
if (removedPaths != null) {
+ restoreFlowEndpoints(removedPaths, originalFlow);
saveRemovalActionWithDumpToHistory(stateMachine, originalFlow, removedPaths);
}
}
@@ -106,11 +107,20 @@ public class CompleteFlowPathRemovalAction extends
updateIslsForFlowPath(removedPaths.getReverse());
}
if (removedPaths != null) {
+ restoreFlowEndpoints(removedPaths, originalFlow);
saveRemovalActionWithDumpToHistory(stateMachine, originalFlow, removedPaths);
}
}
}
+ private void restoreFlowEndpoints(FlowPathPair flowPathPair, Flow originalFlow) {
+ Flow flow = flowPathPair.getForward().getFlow();
+ flow.setSrcSwitch(originalFlow.getSrcSwitch());
+ flow.setSrcPort(originalFlow.getSrcPort());
+ flow.setDestSwitch(originalFlow.getDestSwitch());
+ flow.setDestPort(originalFlow.getDestPort());
+ }
+
private void removeRejectedFlowPaths(Flow flow, FlowUpdateFsm stateMachine) {
stateMachine.getRejectedPaths().stream()
.forEach(pathId -> | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/CompleteFlowPathRemovalAction.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 11,380,792 | 2,376,160 | 290,334 | 3,053 | 511 | 100 | 10 | 1 | 1,277 | 73 | 447 | 25 | 0 | 2 | 1970-01-01T00:27:29 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,057 | telstra/open-kilda/4402/4389 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4389 | https://github.com/telstra/open-kilda/pull/4402 | https://github.com/telstra/open-kilda/pull/4402 | 1 | resolves | SERVER_42_ISL_RTT_OUTPUT_COOKIE is not installed within InstallActions.INSTALL_DEFAULTS | **Steps to reroduce:**
enable islRtt on a switch;
delete rules from the switch (DROP_ALL)
install rules (INSTALL_DEFAULTS)
**Actual result:**
SERVER_42_ISL_RTT_OUTPUT_COOKIE is not installed
-9223372036854775780 (800000000000001c) - is missing
**Expected result:**
SERVER_42_ISL_RTT_OUTPUT_COOKIE is installed within InstallActions.INSTALL_DEFAULTS
| d7d0214f003fb23138fe95cc96ad4b18257023df | 53ea389fdb9feee0744f86e7a277e41040661dd4 | https://github.com/telstra/open-kilda/compare/d7d0214f003fb23138fe95cc96ad4b18257023df...53ea389fdb9feee0744f86e7a277e41040661dd4 | diff --git a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/kafka/RecordHandler.java b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/kafka/RecordHandler.java
index ef63a62d6..78ce8d8a1 100644
--- a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/kafka/RecordHandler.java
+++ b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/kafka/RecordHandler.java
@@ -914,8 +914,8 @@ class RecordHandler implements Runnable {
if (request.isServer42IslRttEnabled()) {
installedRules.add(processInstallDefaultFlowByCookie(request.getSwitchId(),
SERVER_42_ISL_RTT_TURNING_COOKIE));
- installedRules.add(processInstallDefaultFlowByCookie(request.getSwitchId(),
- SERVER_42_ISL_RTT_OUTPUT_COOKIE));
+ installedRules.add(switchManager.installServer42IslRttOutputFlow(dpid,
+ request.getServer42Port(), request.getServer42Vlan(), request.getServer42MacAddress()));
for (Integer port : request.getIslPorts()) {
installedRules.add(switchManager.installServer42IslRttInputFlow(dpid, server42Port, port));
}
diff --git a/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/kafka/RecordHandlerTest.java b/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/kafka/RecordHandlerTest.java
index dcdf43801..95eea79e4 100644
--- a/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/kafka/RecordHandlerTest.java
+++ b/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/kafka/RecordHandlerTest.java
@@ -15,14 +15,18 @@
package org.openkilda.floodlight.kafka;
+import static java.util.Collections.emptyList;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock;
+import static org.easymock.EasyMock.niceMock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
+import static org.openkilda.messaging.command.switches.InstallRulesAction.INSTALL_DEFAULTS;
import static org.openkilda.model.cookie.Cookie.SERVER_42_FLOW_RTT_OUTPUT_VLAN_COOKIE;
import static org.openkilda.model.cookie.Cookie.SERVER_42_FLOW_RTT_OUTPUT_VXLAN_COOKIE;
import static org.openkilda.model.cookie.Cookie.SERVER_42_ISL_RTT_OUTPUT_COOKIE;
+import static org.openkilda.model.cookie.Cookie.SERVER_42_ISL_RTT_TURNING_COOKIE;
import static org.openkilda.model.cookie.CookieBase.CookieType.SERVER_42_FLOW_RTT_INPUT;
import static org.openkilda.model.cookie.CookieBase.CookieType.SERVER_42_ISL_RTT_INPUT;
@@ -34,6 +38,7 @@ import org.openkilda.messaging.command.flow.InstallFlowForSwitchManagerRequest;
import org.openkilda.messaging.command.flow.InstallServer42Flow;
import org.openkilda.messaging.command.flow.ReinstallServer42FlowForSwitchManagerRequest;
import org.openkilda.messaging.command.switches.DeleteRulesCriteria;
+import org.openkilda.messaging.command.switches.SwitchRulesInstallRequest;
import org.openkilda.model.MacAddress;
import org.openkilda.model.SwitchId;
import org.openkilda.model.cookie.FlowSharedSegmentCookie;
@@ -42,6 +47,7 @@ import org.openkilda.model.cookie.PortColourCookie;
import com.google.common.collect.Lists;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.junit.Before;
import org.junit.Test;
import org.projectfloodlight.openflow.types.DatapathId;
@@ -70,7 +76,9 @@ public class RecordHandlerTest {
expect(context.getModuleContext()).andReturn(floodlightModuleContext).anyTimes();
expect(context.getKafkaSwitchManagerTopic()).andReturn("").anyTimes();
replay(context);
- recordHandler = new RecordHandler(context, null, null);
+
+ ConsumerRecord record = niceMock(ConsumerRecord.class);
+ recordHandler = new RecordHandler(context, null, record);
}
@Test
@@ -311,4 +319,25 @@ public class RecordHandlerTest {
new CommandMessage(new InstallFlowForSwitchManagerRequest(request), 0, CORRELATION_ID));
verify(switchManager);
}
+
+ @Test
+ public void shouldInstallServer42IslRttRulesOnInstallDefaults() throws SwitchOperationException {
+ expect(switchManager.installDefaultRules(DATAPATH_ID))
+ .andReturn(emptyList()).once();
+ expect(switchManager.installServer42IslRttOutputFlow(
+ DATAPATH_ID, SERVER42_PORT, SERVER42_VLAN, SERVER42_MAC_ADDRESS))
+ .andReturn(SERVER_42_ISL_RTT_OUTPUT_COOKIE).once();
+ expect(switchManager.installServer42IslRttTurningFlow(DATAPATH_ID))
+ .andReturn(SERVER_42_ISL_RTT_TURNING_COOKIE).once();
+ replay(switchManager);
+
+ SwitchRulesInstallRequest request = new SwitchRulesInstallRequest(SWITCH_ID, INSTALL_DEFAULTS);
+ request.setServer42IslRttEnabled(true);
+ request.setServer42Port(SERVER42_PORT);
+ request.setServer42Vlan(SERVER42_VLAN);
+ request.setServer42MacAddress(SERVER42_MAC_ADDRESS);
+ recordHandler.handleCommand(new CommandMessage(request, 0, CORRELATION_ID));
+
+ verify(switchManager);
+ }
} | ['src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/kafka/RecordHandlerTest.java', 'src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/kafka/RecordHandler.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 9,504,449 | 1,984,049 | 245,840 | 2,578 | 370 | 62 | 4 | 1 | 362 | 35 | 94 | 12 | 0 | 0 | 1970-01-01T00:27:08 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,059 | telstra/open-kilda/4271/4267 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4267 | https://github.com/telstra/open-kilda/pull/4271 | https://github.com/telstra/open-kilda/pull/4271 | 1 | closes | reroute is not working for a 'vxlan' flow | **Steps to reproduce:**
1. find switch pair with two paths at least;
2. create a vxlan flow;
3. break the flow path(init auto reroute)
**Expected result:**
Flow is rerouted to an alternative path.
**Actual result:**
Flow is not rerouted.
Flow is stayed on the same path and moved to the DOWN status.
No error in kibana.
Error in flowHistory:
```
{
"timestamp": 1621864291,
"action": "Failed to reroute the flow",
"details": "InstallNonIngressRulesAction failed: No encapsulation resources found for flow path noQINQ_vxlan_r_3_4a5563c7-e35a-436b-aaee-8a452683fe18 (opposite: noQINQ_vxlan_r_3_382162d3-885b-4dda-aec5-ce0369b36fe7)"
}
```
| 180eb0b397b90cdeded5da366b52de1bacc4d2a8 | 896316451b9b83049b497032ea57b7c11208e599 | https://github.com/telstra/open-kilda/compare/180eb0b397b90cdeded5da366b52de1bacc4d2a8...896316451b9b83049b497032ea57b7c11208e599 | diff --git a/src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/share/service/SpeakerFlowSegmentRequestBuilder.java b/src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/share/service/SpeakerFlowSegmentRequestBuilder.java
index ce72aca06..c204a1a02 100644
--- a/src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/share/service/SpeakerFlowSegmentRequestBuilder.java
+++ b/src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/share/service/SpeakerFlowSegmentRequestBuilder.java
@@ -460,7 +460,8 @@ public class SpeakerFlowSegmentRequestBuilder implements FlowCommandBuilder {
EncapsulationResources resources = resourcesManager
.getEncapsulationResources(pathId, oppositePathId, encapsulation)
.orElseThrow(() -> new IllegalStateException(format(
- "No encapsulation resources found for flow path %s (opposite: %s)", pathId, oppositePathId)));
+ "No %s encapsulation resources found for flow path %s (opposite: %s).",
+ encapsulation, pathId, oppositePathId)));
return new FlowTransitEncapsulation(resources.getTransitEncapsulationId(), resources.getEncapsulationType());
}
}
diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallIngressRulesAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallIngressRulesAction.java
index c887bc704..5edc4cd86 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallIngressRulesAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallIngressRulesAction.java
@@ -18,7 +18,6 @@ package org.openkilda.wfm.topology.flowhs.fsm.reroute.actions;
import org.openkilda.floodlight.api.request.FlowSegmentRequest;
import org.openkilda.floodlight.api.request.factory.FlowSegmentRequestFactory;
import org.openkilda.model.Flow;
-import org.openkilda.model.FlowEncapsulationType;
import org.openkilda.model.FlowPath;
import org.openkilda.persistence.PersistenceManager;
import org.openkilda.wfm.share.flow.resources.FlowResourcesManager;
@@ -52,9 +51,14 @@ public class InstallIngressRulesAction extends FlowProcessingAction<FlowRerouteF
String flowId = stateMachine.getFlowId();
Flow flow = getFlow(flowId);
- FlowEncapsulationType encapsulationType = stateMachine.getNewEncapsulationType() != null
- ? stateMachine.getNewEncapsulationType() : flow.getEncapsulationType();
- FlowCommandBuilder commandBuilder = commandBuilderFactory.getBuilder(encapsulationType);
+ // Detach the entity to avoid propagation to the database.
+ flowRepository.detach(flow);
+ if (stateMachine.getNewEncapsulationType() != null) {
+ // This is for commandBuilder.buildIngressOnly() to use proper (updated) encapsulation type.
+ flow.setEncapsulationType(stateMachine.getNewEncapsulationType());
+ }
+
+ FlowCommandBuilder commandBuilder = commandBuilderFactory.getBuilder(flow.getEncapsulationType());
Collection<FlowSegmentRequestFactory> requestFactories = new ArrayList<>();
if (stateMachine.getNewPrimaryForwardPath() != null && stateMachine.getNewPrimaryReversePath() != null) {
diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallNonIngressRulesAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallNonIngressRulesAction.java
index 25d83bfb0..a3f617c46 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallNonIngressRulesAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallNonIngressRulesAction.java
@@ -18,7 +18,6 @@ package org.openkilda.wfm.topology.flowhs.fsm.reroute.actions;
import org.openkilda.floodlight.api.request.FlowSegmentRequest;
import org.openkilda.floodlight.api.request.factory.FlowSegmentRequestFactory;
import org.openkilda.model.Flow;
-import org.openkilda.model.FlowEncapsulationType;
import org.openkilda.model.FlowPath;
import org.openkilda.persistence.PersistenceManager;
import org.openkilda.wfm.share.flow.resources.FlowResourcesManager;
@@ -52,9 +51,14 @@ public class InstallNonIngressRulesAction extends
String flowId = stateMachine.getFlowId();
Flow flow = getFlow(flowId);
- FlowEncapsulationType encapsulationType = stateMachine.getNewEncapsulationType() != null
- ? stateMachine.getNewEncapsulationType() : flow.getEncapsulationType();
- FlowCommandBuilder commandBuilder = commandBuilderFactory.getBuilder(encapsulationType);
+ // Detach the entity to avoid propagation to the database.
+ flowRepository.detach(flow);
+ if (stateMachine.getNewEncapsulationType() != null) {
+ // This is for commandBuilder.buildAllExceptIngress() to use proper (updated) encapsulation type.
+ flow.setEncapsulationType(stateMachine.getNewEncapsulationType());
+ }
+
+ FlowCommandBuilder commandBuilder = commandBuilderFactory.getBuilder(flow.getEncapsulationType());
Collection<FlowSegmentRequestFactory> requestFactories = new ArrayList<>();
| ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallNonIngressRulesAction.java', 'src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/share/service/SpeakerFlowSegmentRequestBuilder.java', 'src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/InstallIngressRulesAction.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 8,513,309 | 1,775,669 | 222,746 | 2,400 | 1,910 | 364 | 27 | 3 | 734 | 84 | 208 | 23 | 0 | 1 | 1970-01-01T00:27:02 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,037 | telstra/open-kilda/5090/2660 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/2660 | https://github.com/telstra/open-kilda/pull/5090 | https://github.com/telstra/open-kilda/pull/5090 | 1 | closes | GET Switch Rules is not showing proper rule information in the 'instructiion -> none' field | **Step to reproduce:**
1. get switch rules via NB `GET /v1/switches/{switch-id}/rules`
2. get switch rules via FL http://127.0.0.1:8081/wm/core/switch/00:00:00:00:00:00:00:02/flow/json
3. compare rules from item 1 against item 2;
**Actual result**
FL:
```
instructions: {
none: "drop"
}
```
NB:
```
"instructions": {
"apply_actions": null,
"none": null,
"instruction_goto_meter": null,
"instruction_goto_table": null
},
```
**Expected result:**
The `none` field should be the same | 2157f1f5c41464c63cbb705cf51fcf0b50e06bae | 7ea8356e924970459cc4d8f8c3dd703c3f981441 | https://github.com/telstra/open-kilda/compare/2157f1f5c41464c63cbb705cf51fcf0b50e06bae...7ea8356e924970459cc4d8f8c3dd703c3f981441 | diff --git a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/converter/OfFlowStatsMapper.java b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/converter/OfFlowStatsMapper.java
index ea6160a10..ab3caf116 100644
--- a/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/converter/OfFlowStatsMapper.java
+++ b/src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/converter/OfFlowStatsMapper.java
@@ -32,6 +32,7 @@ import org.openkilda.messaging.info.stats.FlowStatsEntry;
import org.openkilda.model.SwitchId;
import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.collections4.CollectionUtils;
import org.mapstruct.Mapper;
import org.mapstruct.factory.Mappers;
import org.projectfloodlight.openflow.protocol.OFBucket;
@@ -78,6 +79,8 @@ public abstract class OfFlowStatsMapper {
*/
private static final int VLAN_MASK = 0xFFF;
+ private static final String DROP = "drop";
+
/**
* Convert {@link OFFlowStatsEntry} to format that kilda supports {@link FlowEntry}.
* @param entry flow stats to be converted.
@@ -175,16 +178,20 @@ public abstract class OfFlowStatsMapper {
public FlowInstructions toFlowInstructions(final List<OFInstruction> instructions) {
FlowInstructions.FlowInstructionsBuilder flowInstructions = FlowInstructions.builder();
- for (OFInstruction entry : instructions) {
- if (entry instanceof OFInstructionApplyActions) {
- List<OFAction> actions = ((OFInstructionApplyActions) entry).getActions();
- flowInstructions.applyActions(toFlowApplyActions(actions));
- } else if (entry instanceof OFInstructionMeter) {
- flowInstructions.goToMeter(((OFInstructionMeter) entry).getMeterId());
- } else if (entry instanceof OFInstructionGotoTable) {
- flowInstructions.goToTable(((OFInstructionGotoTable) entry).getTableId().getValue());
+ if (CollectionUtils.isEmpty(instructions)) {
+ flowInstructions.none(DROP);
+ } else {
+ for (OFInstruction entry : instructions) {
+ if (entry instanceof OFInstructionApplyActions) {
+ List<OFAction> actions = ((OFInstructionApplyActions) entry).getActions();
+ flowInstructions.applyActions(toFlowApplyActions(actions));
+ } else if (entry instanceof OFInstructionMeter) {
+ flowInstructions.goToMeter(((OFInstructionMeter) entry).getMeterId());
+ } else if (entry instanceof OFInstructionGotoTable) {
+ flowInstructions.goToTable(((OFInstructionGotoTable) entry).getTableId().getValue());
+ }
+ // add handling for other instructions here
}
- // add handling for other instructions here
}
return flowInstructions.build();
diff --git a/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/converter/OfFlowStatsMapperTest.java b/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/converter/OfFlowStatsMapperTest.java
index 36b3096be..7352d4b52 100644
--- a/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/converter/OfFlowStatsMapperTest.java
+++ b/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/converter/OfFlowStatsMapperTest.java
@@ -89,6 +89,7 @@ public class OfFlowStatsMapperTest {
public static final TableId goToTable = TableId.of(24);
private static final String MAC_ADDRESS_1 = "01:01:01:01:01:01";
private static final String MAC_ADDRESS_2 = "02:02:02:02:02:02";
+ private static final String NONE_DROP = "drop";
@Test
public void testToFlowStatsData() {
@@ -135,29 +136,32 @@ public class OfFlowStatsMapperTest {
assertEquals(udpSrc.toString(), entry.getMatch().getUdpSrc());
assertEquals(udpDst.toString(), entry.getMatch().getUdpDst());
- FlowSetFieldAction flowSetEthSrcAction = new FlowSetFieldAction("eth_src", MAC_ADDRESS_1);
- FlowSetFieldAction flowSetEthDstAction = new FlowSetFieldAction("eth_dst", MAC_ADDRESS_2);
- FlowCopyFieldAction flowCopyFieldAction = FlowCopyFieldAction.builder()
- .bits(String.valueOf(bits))
- .srcOffset(String.valueOf(srcOffset))
- .dstOffset(String.valueOf(dstOffset))
- .srcOxm(String.valueOf(oxmSrcHeader))
- .dstOxm(String.valueOf(oxmDstHeader))
- .build();
- FlowSwapFieldAction flowSwapFieldAction = FlowSwapFieldAction.builder()
- .bits(String.valueOf(bits))
- .srcOffset(String.valueOf(srcOffset))
- .dstOffset(String.valueOf(dstOffset))
- .srcOxm(String.valueOf(oxmSrcHeader))
- .dstOxm(String.valueOf(oxmDstHeader))
- .build();
- FlowApplyActions applyActions = new FlowApplyActions(port.toString(),
- Lists.newArrayList(flowSetEthSrcAction, flowSetEthDstAction), ethType.toString(), null, null, null,
- group.toString(), flowCopyFieldAction, flowSwapFieldAction);
+
+ FlowApplyActions applyActions = buildFlowApplyActions();
FlowInstructions instructions = new FlowInstructions(applyActions, null, meterId, goToTable.getValue());
assertEquals(instructions, entry.getInstructions());
}
+ @Test
+ public void toFlowNoneEmptyInstructions() {
+ FlowApplyActions applyActions = buildFlowApplyActions();
+ FlowInstructions expectedNoneEmptyFlowInstructions = new FlowInstructions(applyActions,
+ null, meterId, goToTable.getValue());
+ FlowInstructions actualNoneEmptyFlowInstructions
+ = OfFlowStatsMapper.INSTANCE.toFlowInstructions(buildInstruction());
+
+ assertEquals(expectedNoneEmptyFlowInstructions, actualNoneEmptyFlowInstructions);
+ }
+
+ @Test
+ public void toFlowEmptyInstructions() {
+ FlowInstructions expectedEmptyFlowInstructions = FlowInstructions.builder().none(NONE_DROP).build();
+ List<OFInstruction> emptyInstructions = Collections.emptyList();
+ FlowInstructions actualEmptyInstructions = OfFlowStatsMapper.INSTANCE.toFlowInstructions(emptyInstructions);
+
+ assertEquals(expectedEmptyFlowInstructions, actualEmptyInstructions);
+ }
+
@Test
public void testFlowGroupEntry() {
OFGroupDescStatsEntry entry = buildFlowGroupEntry();
@@ -175,6 +179,30 @@ public class OfFlowStatsMapperTest {
}
+ private FlowApplyActions buildFlowApplyActions() {
+ FlowSetFieldAction flowSetEthSrcAction = new FlowSetFieldAction("eth_src", MAC_ADDRESS_1);
+ FlowSetFieldAction flowSetEthDstAction = new FlowSetFieldAction("eth_dst", MAC_ADDRESS_2);
+ FlowCopyFieldAction flowCopyFieldAction = FlowCopyFieldAction.builder()
+ .bits(String.valueOf(bits))
+ .srcOffset(String.valueOf(srcOffset))
+ .dstOffset(String.valueOf(dstOffset))
+ .srcOxm(String.valueOf(oxmSrcHeader))
+ .dstOxm(String.valueOf(oxmDstHeader))
+ .build();
+ FlowSwapFieldAction flowSwapFieldAction = FlowSwapFieldAction.builder()
+ .bits(String.valueOf(bits))
+ .srcOffset(String.valueOf(srcOffset))
+ .dstOffset(String.valueOf(dstOffset))
+ .srcOxm(String.valueOf(oxmSrcHeader))
+ .dstOxm(String.valueOf(oxmDstHeader))
+ .build();
+
+ return new FlowApplyActions(port.toString(),
+ Lists.newArrayList(flowSetEthSrcAction, flowSetEthDstAction),
+ ethType.toString(), null, null, null,
+ group.toString(), flowCopyFieldAction, flowSwapFieldAction);
+ }
+
private OFGroupDescStatsEntry buildFlowGroupEntry() {
return factory.buildGroupDescStatsEntry()
.setGroup(OFGroup.of(22)) | ['src-java/floodlight-service/floodlight-modules/src/main/java/org/openkilda/floodlight/converter/OfFlowStatsMapper.java', 'src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/converter/OfFlowStatsMapperTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 11,639,604 | 2,431,756 | 298,185 | 3,169 | 1,599 | 289 | 25 | 1 | 519 | 60 | 157 | 25 | 1 | 2 | 1970-01-01T00:27:56 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,046 | telstra/open-kilda/4981/4972 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4972 | https://github.com/telstra/open-kilda/pull/4981 | https://github.com/telstra/open-kilda/pull/4981 | 1 | closes | Can't update one switch Y flow | **Steps to reproduce:**
1. create one switch Y flow
```
{
"y_flow_id": "flow1",
"shared_endpoint": {
"port_number": 10,
"switch_id": "2"
},
"maximum_bandwidth": 10000,
"ignore_bandwidth": true,
"sub_flows": [
{
"flow_id": "sub1",
"endpoint": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "2",
"vlan_id": 123
},
"shared_endpoint": {
"inner_vlan_id": 0,
"vlan_id": 456
}
},
{
"flow_id": "sub2",
"endpoint": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "2",
"vlan_id": 345
},
"shared_endpoint": {
"inner_vlan_id": 0,
"vlan_id": 789
}
}
]
}
```
2. update flow bandwidth via PATCH request
```
{
"maximum_bandwidth": 20000
}
```
**Expected result:**
flow was updated
**Actual result:**
Flow is stuck in "In progress" state | 053438afae1b25a9fccf3492dcac22fcaa3bf4bf | 54427bd470c1ff37ce1c756840b6edff6e22bab5 | https://github.com/telstra/open-kilda/compare/053438afae1b25a9fccf3492dcac22fcaa3bf4bf...54427bd470c1ff37ce1c756840b6edff6e22bab5 | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java
index 9e2eb265f..89b78109f 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java
@@ -127,10 +127,7 @@ public class ValidateYFlowAction extends
format("Unable to map provided sub-flows set onto existing y-flow %s", yFlowId));
}
- YSubFlow subFlow = yFlow.getSubFlows().stream().findAny()
- .orElseThrow(() -> new FlowProcessingException(ErrorType.DATA_INVALID,
- format("No sub-flows of the y-flow %s were found", yFlowId)));
- stateMachine.setMainAffinityFlowId(subFlow.getFlow().getAffinityGroupId());
+ stateMachine.setMainAffinityFlowId(getMainAffinityFlowId(yFlow));
List<FlowEndpoint> subFlowEndpoints = targetFlow.getSubFlows().stream()
.map(SubFlowDto::getEndpoint)
@@ -145,6 +142,24 @@ public class ValidateYFlowAction extends
return Optional.empty();
}
+ private String getMainAffinityFlowId(YFlow yFlow) {
+ // TODO: maybe we should add filtering of one switch sub flows into method getSubFlows()
+ YSubFlow multiSwitchFlows = yFlow.getSubFlows().stream()
+ .filter(sub -> !sub.isOneSwitchYFlow(yFlow.getSharedEndpoint().getSwitchId()))
+ .findAny()
+ .orElse(null);
+ if (multiSwitchFlows != null) {
+ return multiSwitchFlows.getFlow().getAffinityGroupId();
+ } else {
+ // if there is no multi switch flows we have to use one switch flow
+ YSubFlow oneSwitchSubFlow = yFlow.getSubFlows().stream()
+ .findAny()
+ .orElseThrow(() -> new FlowProcessingException(ErrorType.DATA_INVALID,
+ format("No sub-flows of the y-flow %s were found", yFlow.getYFlowId())));
+ return oneSwitchSubFlow.getSubFlowId();
+ }
+ }
+
@Override
protected String getGenericErrorMessage() {
return "Could not update y-flow";
diff --git a/src-java/kilda-model/src/main/java/org/openkilda/model/YSubFlow.java b/src-java/kilda-model/src/main/java/org/openkilda/model/YSubFlow.java
index 461140835..53d21e27f 100644
--- a/src-java/kilda-model/src/main/java/org/openkilda/model/YSubFlow.java
+++ b/src-java/kilda-model/src/main/java/org/openkilda/model/YSubFlow.java
@@ -83,6 +83,10 @@ public class YSubFlow implements CompositeDataEntity<YSubFlowData> {
this.data = builder.build();
}
+ public boolean isOneSwitchYFlow(SwitchId sharedEndpointSwitchId) {
+ return data.getEndpointSwitchId().equals(sharedEndpointSwitchId);
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) { | ['src-java/kilda-model/src/main/java/org/openkilda/model/YSubFlow.java', 'src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 11,321,323 | 2,363,586 | 289,817 | 3,081 | 1,513 | 332 | 27 | 2 | 979 | 100 | 284 | 53 | 0 | 2 | 1970-01-01T00:27:47 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,045 | telstra/open-kilda/4989/4988 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/4988 | https://github.com/telstra/open-kilda/pull/4989 | https://github.com/telstra/open-kilda/pull/4989 | 1 | closes | Can't update Y flow if one sub flow is one switch and other one is multi switch | **Steps to repoduse:**
1. Create Y flow where A-end == Z-end and B-end != Z-end
Example:
```
{
"y_flow_id": "flow1",
"shared_endpoint": {
"port_number": 10,
"switch_id": "2"
},
"maximum_bandwidth": 10000,
"ignore_bandwidth": true,
"sub_flows": [
{
"flow_id": "sub1",
"endpoint": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "9",
"vlan_id": 123
},
"shared_endpoint": {
"inner_vlan_id": 0,
"vlan_id": 456
}
},
{
"flow_id": "sub2",
"endpoint": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "2",
"vlan_id": 345
},
"shared_endpoint": {
"inner_vlan_id": 0,
"vlan_id": 789
}
}
]
}
```
2. Update the flow by PATCH
```
{
"maximum_bandwidth": 20000
}
```
**Expected result:**
Flow was updated
**Actual result:**
Time out
| 142a4b059ba4345cd2063bc344b70eb76c406e9d | eab97bb7ddb95a564c570a2c37cd43269a536772 | https://github.com/telstra/open-kilda/compare/142a4b059ba4345cd2063bc344b70eb76c406e9d...eab97bb7ddb95a564c570a2c37cd43269a536772 | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java
index 71ae77073..6787f6c07 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java
@@ -145,12 +145,20 @@ public class ValidateYFlowAction extends
private String getMainAffinityFlowId(YFlow yFlow) {
// TODO: maybe we should add filtering of one switch sub flows into method getSubFlows()
- YSubFlow multiSwitchFlows = yFlow.getSubFlows().stream()
+ YSubFlow flowInAffinityGroup = yFlow.getSubFlows().stream()
+ .filter(sub -> sub.getFlow().getAffinityGroupId() != null)
+ .findAny()
+ .orElse(null);
+ if (flowInAffinityGroup != null) {
+ return flowInAffinityGroup.getFlow().getAffinityGroupId();
+ }
+
+ YSubFlow multiSwitchFlow = yFlow.getSubFlows().stream()
.filter(sub -> !sub.isOneSwitchYFlow(yFlow.getSharedEndpoint().getSwitchId()))
.findAny()
.orElse(null);
- if (multiSwitchFlows != null) {
- return multiSwitchFlows.getFlow().getAffinityGroupId();
+ if (multiSwitchFlow != null) {
+ return multiSwitchFlow.getSubFlowId();
} else {
// if there is no multi switch flows we have to use one switch flow
YSubFlow oneSwitchSubFlow = yFlow.getSubFlows().stream() | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/ValidateYFlowAction.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 11,323,831 | 2,364,156 | 289,861 | 3,081 | 666 | 155 | 14 | 1 | 979 | 101 | 292 | 56 | 0 | 2 | 1970-01-01T00:27:47 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,026 | telstra/open-kilda/5336/5320 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/5320 | https://github.com/telstra/open-kilda/pull/5336 | https://github.com/telstra/open-kilda/pull/5336 | 1 | closes | duplicated update event for in ha-flow history | steps to reproduce
1. create ha flow
2. update ha flow
3. get ha flow history
Actual result: ha flow history contains 2 update entries with the same data
Expected result: 1 update event in history
| 686d31220f1033595d7f1d4374544af5ba9c42fe | 7700161ca3ab72f35a32eb9b9e3f8bb0cb4330a0 | https://github.com/telstra/open-kilda/compare/686d31220f1033595d7f1d4374544af5ba9c42fe...7700161ca3ab72f35a32eb9b9e3f8bb0cb4330a0 | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/UpdateHaFlowAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/UpdateHaFlowAction.java
index a34cf6a9f..b393e1bcf 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/UpdateHaFlowAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/UpdateHaFlowAction.java
@@ -26,7 +26,6 @@ import org.openkilda.model.HaSubFlow;
import org.openkilda.model.Switch;
import org.openkilda.persistence.PersistenceManager;
import org.openkilda.wfm.error.FlowNotFoundException;
-import org.openkilda.wfm.share.history.model.HaFlowEventData;
import org.openkilda.wfm.topology.flowhs.exception.FlowProcessingException;
import org.openkilda.wfm.topology.flowhs.fsm.common.actions.NbTrackableWithHistorySupportAction;
import org.openkilda.wfm.topology.flowhs.fsm.haflow.update.HaFlowUpdateContext;
@@ -58,7 +57,6 @@ public class UpdateHaFlowAction extends
HaFlow haFlow = getHaFlow(haFlowId);
log.debug("Updating the flow {} with properties: {}", haFlowId, targetHaFlow);
- saveNewEventInHistory(stateMachine, haFlow);
// Complete target ha-flow in FSM with values from original ha-flow
stateMachine.setTargetHaFlow(updateFlow(haFlow, targetHaFlow));
@@ -130,13 +128,4 @@ public class UpdateHaFlowAction extends
protected String getGenericErrorMessage() {
return "Couldn't update HA-flow";
}
-
- private void saveNewEventInHistory(HaFlowUpdateFsm stateMachine, HaFlow haFlow) {
- HaFlowHistoryService.using(stateMachine.getCarrier()).saveNewHaFlowEvent(HaFlowEventData.builder()
- .taskId(stateMachine.getCommandContext().getCorrelationId())
- .action("Update HA-flow")
- .event(HaFlowEventData.Event.UPDATE)
- .haFlowId(stateMachine.getHaFlowId())
- .build());
- }
} | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/UpdateHaFlowAction.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 12,921,722 | 2,706,146 | 326,493 | 3,388 | 582 | 123 | 11 | 1 | 209 | 36 | 49 | 9 | 0 | 0 | 1970-01-01T00:28:12 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,028 | telstra/open-kilda/5290/5289 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/5289 | https://github.com/telstra/open-kilda/pull/5290 | https://github.com/telstra/open-kilda/pull/5290 | 1 | closes | Flow reroute has a wrong message in the flow history | When rerouting a simple flow there is a message in the flow history: HA-flow was validated successfully.
Steps to reproduce:
1. Create a flow with any parameters
2. Execute a reroute for this flow: `/flows/{id}/reroute`
3. Navigate to this flow's history: `/flows/{id}/history`, find the history event "flow rerouting"
actual result: "action": "HA-flow was validated successfully",
expected result: "action": "Flow was validated successfully" | e3ee4889c8f783333291a11eec82137eaa0c062d | d8777f3db86d3fd22f365dacf6096f523efa5185 | https://github.com/telstra/open-kilda/compare/e3ee4889c8f783333291a11eec82137eaa0c062d...d8777f3db86d3fd22f365dacf6096f523efa5185 | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/ValidateFlowAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/ValidateFlowAction.java
index e8ac62995..037ec78f7 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/ValidateFlowAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/ValidateFlowAction.java
@@ -166,7 +166,7 @@ public class ValidateFlowAction extends
stateMachine.setAffectedIsls(context.getAffectedIsl());
stateMachine.setIgnoreBandwidth(context.isIgnoreBandwidth());
- stateMachine.saveActionToHistory("HA-flow was validated successfully");
+ stateMachine.saveActionToHistory("The flow has been validated successfully");
return Optional.empty();
} | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/ValidateFlowAction.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 12,701,537 | 2,659,823 | 321,151 | 3,335 | 167 | 29 | 2 | 1 | 453 | 62 | 103 | 9 | 0 | 0 | 1970-01-01T00:28:09 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,030 | telstra/open-kilda/5262/5249 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/5249 | https://github.com/telstra/open-kilda/pull/5262 | https://github.com/telstra/open-kilda/pull/5262 | 1 | closes | HaFlow Create or Update: updateFlowMonitoring throws IllegalArgumentException | Steps to reproduce using the standard test topology:
- create an HA flow with a valid request parameters. For example, use the request below.
Expected result: HA flow is created, no errors in the log
Actual result: there is an error in the logs, however the flow was created successfully.
The same behavior happens when performing an update (PUT). You can use the same request below.
The error:
```
java.lang.IllegalArgumentException: HA-flow ha-flow-1 has no HA-sub flow ha-flow-1-a
```
with the stack trace that includes:
```json
{
"file": "HaFlow.java",
"method": "getHaSubFlowOrThrowException",
"line": 374,
"exact": false,
"location": "stormjar.jar",
"class": "org.openkilda.model.HaFlow",
"version": "?"
},
{
"file": "OnFinishedAction.java",
"method": "updateFlowMonitoring",
"line": 81,
"exact": false,
"location": "stormjar.jar",
"class": "org.openkilda.wfm.topology.flowhs.fsm.haflow.update.actions.OnFinishedAction",
"version": "?"
}
```
An example of the request to create an HA flow:
```json
{
"ha_flow_id": "ha-flow-1",
"status": "In progress",
"shared_endpoint": {
"switch_id": "00:00:00:00:00:00:00:02",
"port_number": 23,
"vlan_id": 0,
"inner_vlan_id": 0
},
"maximum_bandwidth": 0,
"path_computation_strategy": "COST",
"encapsulation_type": "transit_vlan",
"max_latency": 0,
"max_latency_tier2": 0,
"ignore_bandwidth": true,
"periodic_pings": true,
"pinned": false,
"priority": 0,
"strict_bandwidth": false,
"description": "HA flow test description 2",
"allocate_protected_path": false,
"diverse_with_flows": [],
"diverse_with_y_flows": [],
"diverse_with_ha_flows": [],
"sub_flows": [
{
"flow_id": "ha-flow-1-a",
"endpoint": {
"switch_id": "00:00:00:00:00:00:00:09",
"port_number": 22,
"vlan_id": 0,
"inner_vlan_id": 0
},
"status": "In progress",
"description": "the first end point",
"time_create": "2023-06-09T12:08:36.616Z",
"time_update": "2023-06-09T12:08:36.618Z"
},
{
"flow_id": "ha-flow-1-b",
"endpoint": {
"switch_id": "00:00:00:00:00:00:00:07",
"port_number": 21,
"vlan_id": 0,
"inner_vlan_id": 0
},
"status": "In progress",
"description": "the second end point",
"time_create": "2023-06-09T12:08:36.617Z",
"time_update": "2023-06-09T12:08:36.618Z"
}
],
"time_create": "2023-06-09T12:08:36.614Z",
"time_update": "2023-06-09T12:08:37.038Z"
}
``` | 93b300f91c1b7fe07e3d648df6c4e850a4af4507 | 2c7aa73981cc5dbfbbde47ae96a30ddc3682b28c | https://github.com/telstra/open-kilda/compare/93b300f91c1b7fe07e3d648df6c4e850a4af4507...2c7aa73981cc5dbfbbde47ae96a30ddc3682b28c | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/OnFinishedAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/OnFinishedAction.java
index 148ceaf61..b0d870c10 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/OnFinishedAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/OnFinishedAction.java
@@ -31,6 +31,8 @@ import org.openkilda.wfm.topology.flowhs.mapper.HaFlowMapper;
import lombok.extern.slf4j.Slf4j;
+import java.util.stream.Collectors;
+
@Slf4j
public class OnFinishedAction extends HistoryRecordingAction<HaFlowUpdateFsm, State, Event, HaFlowUpdateContext> {
public static final String DEGRADED_FAIL_REASON = "Not all paths meet the SLA";
@@ -67,12 +69,12 @@ public class OnFinishedAction extends HistoryRecordingAction<HaFlowUpdateFsm, St
private void updateFlowMonitoring(HaFlowUpdateFsm stateMachine) {
HaFlow original = stateMachine.getOriginalHaFlow();
- HaFlow target = HaFlowMapper.INSTANCE.toHaFlow(stateMachine.getTargetHaFlow());
+ HaFlow target = mapToTargetHaFlow(stateMachine.getTargetHaFlow());
for (HaSubFlow originalSubFlow : original.getHaSubFlows()) {
HaSubFlow targetSubFlow = target.getHaSubFlowOrThrowException(originalSubFlow.getHaSubFlowId());
boolean originalNotSingle = !originalSubFlow.isOneSwitch();
- boolean targetNotSingle = !targetSubFlow.isOneSwitch();
+ boolean targetNotSingle = !targetSubFlow.isOneSwitch(target.getSharedSwitchId());
boolean srcUpdated = isSrcUpdated(original, target);
boolean dstUpdated = isDstUpdated(originalSubFlow, targetSubFlow);
@@ -86,7 +88,7 @@ public class OnFinishedAction extends HistoryRecordingAction<HaFlowUpdateFsm, St
// setup new if it is not single
//TODO: Review logic during https://github.com/telstra/open-kilda/issues/5208
if (targetNotSingle && (srcUpdated || dstUpdated)) {
- stateMachine.getCarrier().sendActivateFlowMonitoring(null);
+ //stateMachine.getCarrier().sendActivateFlowMonitoring(null);
}
}
}
@@ -104,4 +106,12 @@ public class OnFinishedAction extends HistoryRecordingAction<HaFlowUpdateFsm, St
&& originalSubFlow.getEndpointVlan() == targetSubFlow.getEndpointVlan()
&& originalSubFlow.getEndpointInnerVlan() == targetSubFlow.getEndpointInnerVlan());
}
+
+ private HaFlow mapToTargetHaFlow(HaFlowRequest targetHaFlowRequest) {
+ HaFlow target = HaFlowMapper.INSTANCE.toHaFlow(targetHaFlowRequest);
+ target.setHaSubFlows(targetHaFlowRequest.getSubFlows().stream()
+ .map(HaFlowMapper.INSTANCE::toSubFlow)
+ .collect(Collectors.toSet()));
+ return target;
+ }
}
diff --git a/src-java/kilda-model/src/main/java/org/openkilda/model/HaSubFlow.java b/src-java/kilda-model/src/main/java/org/openkilda/model/HaSubFlow.java
index 9da43de47..11f29faf1 100644
--- a/src-java/kilda-model/src/main/java/org/openkilda/model/HaSubFlow.java
+++ b/src-java/kilda-model/src/main/java/org/openkilda/model/HaSubFlow.java
@@ -128,6 +128,10 @@ public class HaSubFlow implements CompositeDataEntity<HaSubFlowData> {
return getEndpointSwitchId().equals(getHaFlow().getSharedSwitchId());
}
+ public boolean isOneSwitch(SwitchId sharedSwitchId) {
+ return getEndpointSwitchId().equals(sharedSwitchId);
+ }
+
/**
* Defines persistable data of the sub-flow.
*/ | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/update/actions/OnFinishedAction.java', 'src-java/kilda-model/src/main/java/org/openkilda/model/HaSubFlow.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 12,735,549 | 2,667,235 | 322,324 | 3,364 | 1,015 | 206 | 20 | 2 | 2,847 | 243 | 814 | 93 | 0 | 3 | 1970-01-01T00:28:08 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,034 | telstra/open-kilda/5185/5175 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/5175 | https://github.com/telstra/open-kilda/pull/5185 | https://github.com/telstra/open-kilda/pull/5185 | 1 | closes | Y-flow get paths response has incorrect diverse flows after update | **Steps to reproduce:**
1. Create a Y flow via `POST /v2/y-flows`
```
{
"y_flow_id": "flow_1",
"shared_endpoint": {
"port_number": 10,
"switch_id": "9"
},
"maximum_bandwidth": 10000,
"ignore_bandwidth": true,
"sub_flows": [
{
"flow_id": "sub_1",
"endpoint": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "2",
"vlan_id": 123
},
"shared_endpoint": {
"inner_vlan_id": 0,
"vlan_id": 456
}
},
{
"flow_id": "sub_2",
"endpoint": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "2",
"vlan_id": 345
},
"shared_endpoint": {
"inner_vlan_id": 0,
"vlan_id": 789
}
}
]
}
```
2. Create another Y flow without diversity via `POST /v2/y-flows`
```
{
"y_flow_id": "flow_2",
"shared_endpoint": {
"port_number": 10,
"switch_id": "8"
},
"maximum_bandwidth": 10000,
"ignore_bandwidth": true,
"sub_flows": [
{
"flow_id": "sub_3",
"endpoint": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "3",
"vlan_id": 111
},
"shared_endpoint": {
"inner_vlan_id": 0,
"vlan_id": 222
}
},
{
"flow_id": "sub_4",
"endpoint": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "3",
"vlan_id": 333
},
"shared_endpoint": {
"inner_vlan_id": 0,
"vlan_id": 444
}
}
]
}
```
3. Update the first flow to add diversity flow via `PATCH /v2/y-flows/flow_1`
```
{
"diverse_flow_id": "flow_2"
}
```
4. Get paths of both flows via `GET /v2/y-flows/{y_flow_id}/paths`
**Expected result:**
Each subflow must contain 3 other subflows in diverse group section (at this moment we see expected responses only if second flow was initially created with `"diverse_flow_id": "flow_1"`)
Expected responses are:
`flow_1`
```
{
******
"sub_flow_paths":[
{
"flow_id":"sub_1",
******
"diverse_group":{
"other_flows":[
{
"flowid":"sub_4",
***
},
{
"flowid":"sub_3",
****
},
{
"flowid":"sub_2",
******
}
]
},
{
"flow_id":"sub_2",
*****
"other_flows":[
{
"flowid":"sub_4",
*****
},
{
"flowid":"sub_3",
*******
},
{
"flowid":"sub_1",
*******
}
]
****
}
```
`flow_2`
```
{
*****
"sub_flow_paths":[
{
"flow_id":"sub_4",
*****
"diverse_group":{
"other_flows":[
{
"flowid":"sub_3",
*****
},
{
"flowid":"sub_2",
*****
},
{
"flowid":"sub_1",
*****
}
]
},
},
{
"flow_id":"sub_3",
"diverse_group":{
"other_flows":[
{
"flowid":"sub_4",
*****
},
{
"flowid":"sub_2",
*****
},
{
"flowid":"sub_1",
*****
}
]
},
*****
}
]
}
```
**Actual result**
`flow_1`
```
{
****
"sub_flow_paths":[
{
"flow_id":"sub_1",
*****
"diverse_group":{
"other_flows":[
{
"flowid":"sub_4",
****
},
{
"flowid":"sub_3",
****
}
<<<<<<<<<<<< no info about sub_2
]
},
"protected_path":{
}
},
{
"flow_id":"sub_2",
****
<<<<<<<<<<< no diverse_group at all
}
]
}
```
`flow_2`
```
{
*****
"sub_flow_paths":[
{
"flow_id":"sub_3",
*****
"diverse_group":{
"other_flows":[
{
"flowid":"sub_1",
****
},
{
"flowid":"sub_4",
*****
} <<<<<< no info about sub_2
]
},
},
{
"flow_id":"sub_4",
"diverse_group":{
"other_flows":[
{
"flowid":"sub_1",
****
},
{
"flowid":"sub_3",
****
} <<<<<< no info about sub_2
]
},
}
]
}
``` | 26e8a99768a7f01580751ca333642ceacfcb3062 | bf7bcc3737b95b4c2d26a1ea1abecc42878ae3f3 | https://github.com/telstra/open-kilda/compare/26e8a99768a7f01580751ca333642ceacfcb3062...bf7bcc3737b95b4c2d26a1ea1abecc42878ae3f3 | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/OnSubFlowUpdatedAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/OnSubFlowUpdatedAction.java
index 75437cd0a..633848c17 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/OnSubFlowUpdatedAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/OnSubFlowUpdatedAction.java
@@ -59,6 +59,7 @@ public class OnSubFlowUpdatedAction extends
stateMachine.notifyEventListeners(listener ->
listener.onSubFlowProcessingStart(yFlowId, requestedFlowId));
CommandContext flowContext = stateMachine.getCommandContext().fork(requestedFlowId);
+ requestedFlow.setDiverseFlowId(stateMachine.getDiverseFlowId());
flowUpdateService.startFlowUpdating(flowContext, requestedFlow, yFlowId);
}
}); | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/yflow/update/actions/OnSubFlowUpdatedAction.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 12,129,336 | 2,535,048 | 309,185 | 3,253 | 85 | 15 | 1 | 1 | 4,601 | 392 | 1,290 | 269 | 0 | 7 | 1970-01-01T00:28:03 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,035 | telstra/open-kilda/5166/5165 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/5165 | https://github.com/telstra/open-kilda/pull/5166 | https://github.com/telstra/open-kilda/pull/5166 | 1 | closes | HA-flow protected path is equal to main path | **Steps to reproduce:**
1. Create ha flow with protected path via `POST /v2/ha-flows` with enabled protected path
2. check that main ha-flow path and protected path does not intersect
You can do it by `GET /v2/ha-flows/{ha_flow_id}/paths` when issue https://github.com/telstra/open-kilda/issues/5148 will be ready or you can check it directly in orient DB
**Expected result:**
Paths do not intersect (excluding first and last switch)
**Actual result:**
paths are equal | a345c686bd0f88526c2b9c1a4a9bb7c29c7808d3 | 3d0ca4808dcac4cc670f5bfbf5b5df4b87912523 | https://github.com/telstra/open-kilda/compare/a345c686bd0f88526c2b9c1a4a9bb7c29c7808d3...3d0ca4808dcac4cc670f5bfbf5b5df4b87912523 | diff --git a/src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilder.java b/src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilder.java
index 85b95d3f2..3fd3d4eec 100644
--- a/src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilder.java
+++ b/src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilder.java
@@ -89,22 +89,31 @@ public class FlowPathBuilder {
* @param flowPath the flow path to evaluate.
*/
public boolean arePathsOverlapped(Path path, FlowPath flowPath) {
- Set<Segment> pathSegments = path.getSegments().stream()
- .map(segment -> segment.toBuilder().latency(0).build())
- .collect(Collectors.toSet());
- Set<Segment> flowSegments = flowPath.getSegments().stream()
- .map(segment -> Segment.builder()
- .srcSwitchId(segment.getSrcSwitchId())
- .srcPort(segment.getSrcPort())
- .destSwitchId(segment.getDestSwitchId())
- .destPort(segment.getDestPort())
- .latency(0)
- .build())
- .collect(Collectors.toSet());
+ Set<Segment> pathSegments = buildSegmentSet(path);
+ Set<Segment> flowSegments = buildSegmentSet(flowPath);
return !Sets.intersection(pathSegments, flowSegments).isEmpty();
}
+ /**
+ * Check whether the ha-path and ha-flow path overlap.
+ *
+ * @param haPath the ha-path to evaluate.
+ * @param haFlowPath the ha-flow path to evaluate.
+ */
+ public boolean arePathsOverlapped(HaPath haPath, HaFlowPath haFlowPath) {
+ for (Path subPath : haPath.getSubPaths()) {
+ Set<Segment> haPathSegments = buildSegmentSet(subPath);
+ for (FlowPath haFlowSubPath : haFlowPath.getSubPaths()) {
+ Set<Segment> haFlowSegments = buildSegmentSet(haFlowSubPath);
+ if (!Sets.intersection(haPathSegments, haFlowSegments).isEmpty()) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
/**
* Build a flow path entity for the flow using provided path and resources.
*
@@ -304,6 +313,28 @@ public class FlowPathBuilder {
return result;
}
+ private static Set<Segment> buildSegmentSet(FlowPath flowPath) {
+ return flowPath.getSegments().stream()
+ .map(segment -> Segment.builder()
+ .srcSwitchId(segment.getSrcSwitchId())
+ .srcPort(segment.getSrcPort())
+ .destSwitchId(segment.getDestSwitchId())
+ .destPort(segment.getDestPort())
+ .build())
+ .collect(Collectors.toSet());
+ }
+
+ private static Set<Segment> buildSegmentSet(Path path) {
+ return path.getSegments().stream()
+ .map(segment -> Segment.builder()
+ .srcSwitchId(segment.getSrcSwitchId())
+ .srcPort(segment.getSrcPort())
+ .destSwitchId(segment.getDestSwitchId())
+ .destPort(segment.getDestPort())
+ .build())
+ .collect(Collectors.toSet());
+ }
+
private LazyMap<SwitchId, SwitchProperties> getSwitchProperties(PathId pathId) {
return LazyMap.lazyMap(new HashMap<>(), switchId ->
switchPropertiesRepository.findBySwitchId(switchId)
diff --git a/src-java/base-topology/base-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilderTest.java b/src-java/base-topology/base-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilderTest.java
index 384290cc2..be2a9df46 100644
--- a/src-java/base-topology/base-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilderTest.java
+++ b/src-java/base-topology/base-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilderTest.java
@@ -16,12 +16,10 @@
package org.openkilda.wfm.topology.flowhs.service;
import static java.util.Arrays.asList;
+import static java.util.Collections.singletonList;
import static junit.framework.TestCase.assertTrue;
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.hasSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -29,6 +27,8 @@ import static org.mockito.Mockito.when;
import org.openkilda.model.Flow;
import org.openkilda.model.FlowPath;
import org.openkilda.model.FlowPathDirection;
+import org.openkilda.model.HaFlowPath;
+import org.openkilda.model.HaSubFlow;
import org.openkilda.model.MeterId;
import org.openkilda.model.PathId;
import org.openkilda.model.PathSegment;
@@ -36,6 +36,7 @@ import org.openkilda.model.Switch;
import org.openkilda.model.SwitchId;
import org.openkilda.model.SwitchProperties;
import org.openkilda.model.cookie.FlowSegmentCookie;
+import org.openkilda.pce.HaPath;
import org.openkilda.pce.Path;
import org.openkilda.pce.Path.Segment;
import org.openkilda.persistence.repositories.KildaConfigurationRepository;
@@ -43,6 +44,7 @@ import org.openkilda.persistence.repositories.SwitchPropertiesRepository;
import org.openkilda.persistence.repositories.SwitchRepository;
import org.openkilda.wfm.share.flow.resources.FlowResources.PathResources;
+import com.google.common.collect.ImmutableList;
import org.junit.Before;
import org.junit.Test;
@@ -50,6 +52,15 @@ import java.util.Collections;
import java.util.Optional;
public class FlowPathBuilderTest {
+ private static final SwitchId SWITCH_ID_1 = new SwitchId(1);
+ private static final SwitchId SWITCH_ID_2 = new SwitchId(2);
+ private static final SwitchId SWITCH_ID_3 = new SwitchId(3);
+ private static final Switch SWITCH_1 = Switch.builder().switchId(SWITCH_ID_1).build();
+ private static final Switch SWITCH_2 = Switch.builder().switchId(SWITCH_ID_2).build();
+ private static final Switch SWITCH_3 = Switch.builder().switchId(SWITCH_ID_3).build();
+ private static final PathId PATH_ID_1 = new PathId("test_path_id");
+ private static final String HA_SUB_FLOW = "ha_sub_flow";
+
private FlowPathBuilder builder;
@Before
@@ -68,7 +79,7 @@ public class FlowPathBuilderTest {
}
@Test
- public void shouldDetectSameSwitchPaths() {
+ public void detectSameSwitchPaths() {
SwitchId switchId = new SwitchId(1);
Switch switchEntity = Switch.builder().switchId(switchId).build();
@@ -88,21 +99,16 @@ public class FlowPathBuilderTest {
}
@Test
- public void shouldDetectNotSameSwitchPaths() {
- SwitchId switchId1 = new SwitchId(1);
-
- SwitchId switchId2 = new SwitchId(2);
- Switch switch2 = Switch.builder().switchId(switchId2).build();
-
+ public void detectNotSameSwitchPaths() {
Path path = Path.builder()
- .srcSwitchId(switchId1)
- .destSwitchId(switchId1)
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_1)
.segments(Collections.emptyList())
.build();
FlowPath flowPath = FlowPath.builder()
- .srcSwitch(switch2)
- .destSwitch(switch2)
+ .srcSwitch(SWITCH_2)
+ .destSwitch(SWITCH_2)
.pathId(new PathId("test_path_id"))
.build();
@@ -110,126 +116,78 @@ public class FlowPathBuilderTest {
}
@Test
- public void shouldDetectSame2SwitchPaths() {
- SwitchId switchId1 = new SwitchId(1);
- Switch switch1 = Switch.builder().switchId(switchId1).build();
-
- SwitchId switchId2 = new SwitchId(2);
- Switch switch2 = Switch.builder().switchId(switchId2).build();
-
+ public void detectSame2SwitchPaths() {
Path path = Path.builder()
- .srcSwitchId(switchId1)
- .destSwitchId(switchId2)
- .segments(Collections.singletonList(Segment.builder()
- .srcSwitchId(switchId1)
- .srcPort(1)
- .destSwitchId(switchId2)
- .destPort(2)
- .build()))
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_2)
+ .segments(singletonList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_2, 2)))
.build();
PathId flowPathId = new PathId("test_path_id");
FlowPath flowPath = FlowPath.builder()
- .srcSwitch(switch1)
- .destSwitch(switch2)
+ .srcSwitch(SWITCH_1)
+ .destSwitch(SWITCH_2)
.pathId(flowPathId)
- .segments(Collections.singletonList(PathSegment.builder()
- .pathId(flowPathId)
- .srcSwitch(switch1).srcPort(1).destSwitch(switch2).destPort(2).build()))
+ .segments(singletonList(buildPathSegment(flowPathId, SWITCH_1, 1, SWITCH_2, 2)))
.build();
assertTrue(builder.isSamePath(path, flowPath));
}
@Test
- public void shouldDetectDifferenceInPortsFor2SwitchPaths() {
- SwitchId switchId1 = new SwitchId(1);
- Switch switch1 = Switch.builder().switchId(switchId1).build();
-
- SwitchId switchId2 = new SwitchId(2);
- Switch switch2 = Switch.builder().switchId(switchId2).build();
-
+ public void detectDifferenceInPortsFor2SwitchPaths() {
Path path = Path.builder()
- .srcSwitchId(switchId1)
- .destSwitchId(switchId2)
- .segments(Collections.singletonList(Segment.builder()
- .srcSwitchId(switchId1)
- .srcPort(1)
- .destSwitchId(switchId2)
- .destPort(2)
- .build()))
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_2)
+ .segments(singletonList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_2, 2)))
.build();
PathId flowPathId = new PathId("test_path_id");
FlowPath flowPath = FlowPath.builder()
- .srcSwitch(switch1)
- .destSwitch(switch2)
+ .srcSwitch(SWITCH_1)
+ .destSwitch(SWITCH_2)
.pathId(flowPathId)
- .segments(Collections.singletonList(PathSegment.builder()
+ .segments(singletonList(PathSegment.builder()
.pathId(flowPathId)
- .srcSwitch(switch1).srcPort(2).destSwitch(switch2).destPort(3).build()))
+ .srcSwitch(SWITCH_1).srcPort(2).destSwitch(SWITCH_2).destPort(3).build()))
.build();
assertFalse(builder.isSamePath(path, flowPath));
}
@Test
- public void shouldDetectSame3SwitchPaths() {
- SwitchId switchId1 = new SwitchId(1);
- Switch switch1 = Switch.builder().switchId(switchId1).build();
-
- SwitchId switchId2 = new SwitchId(2);
- Switch switch2 = Switch.builder().switchId(switchId2).build();
-
- SwitchId switchId3 = new SwitchId(3);
- Switch switch3 = Switch.builder().switchId(switchId3).build();
-
+ public void detectSame3SwitchPaths() {
Path path = Path.builder()
- .srcSwitchId(switchId1)
- .destSwitchId(switchId2)
- .segments(asList(Segment.builder()
- .srcSwitchId(switchId1)
- .srcPort(1)
- .destSwitchId(switchId3)
- .destPort(2)
- .build(), Segment.builder()
- .srcSwitchId(switchId3)
- .srcPort(1)
- .destSwitchId(switchId2)
- .destPort(2)
- .build()))
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_2)
+ .segments(asList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_3, 2),
+ buildSegment(SWITCH_ID_3, 1, SWITCH_ID_2, 2)))
.build();
PathId flowPathId = new PathId("test_path_id");
FlowPath flowPath = FlowPath.builder()
- .srcSwitch(switch1)
- .destSwitch(switch2)
+ .srcSwitch(SWITCH_1)
+ .destSwitch(SWITCH_2)
.pathId(flowPathId)
.segments(asList(
PathSegment.builder().pathId(flowPathId)
- .srcSwitch(switch1).srcPort(1).destSwitch(switch3).destPort(2).build(),
+ .srcSwitch(SWITCH_1).srcPort(1).destSwitch(SWITCH_3).destPort(2).build(),
PathSegment.builder().pathId(flowPathId)
- .srcSwitch(switch3).srcPort(1).destSwitch(switch2).destPort(2).build()))
+ .srcSwitch(SWITCH_3).srcPort(1).destSwitch(SWITCH_2).destPort(2).build()))
.build();
assertTrue(builder.isSamePath(path, flowPath));
}
@Test
- public void shouldBuildFlowPathFor1SwitchPath() {
- SwitchId switchId = new SwitchId(1);
-
+ public void buildFlowPathFor1SwitchPath() {
Path path = Path.builder()
- .srcSwitchId(switchId)
- .destSwitchId(switchId)
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_1)
.segments(Collections.emptyList())
.build();
- Flow flow = Flow.builder()
- .flowId("test_flow")
- .srcSwitch(Switch.builder().switchId(switchId).build())
- .destSwitch(Switch.builder().switchId(switchId).build())
- .build();
+ Flow flow = buildFlow(SWITCH_1, SWITCH_1);
PathId pathId = new PathId("test_path_id");
MeterId meterId = new MeterId(MeterId.MIN_FLOW_METER_ID);
PathResources pathResources = PathResources.builder()
@@ -243,30 +201,18 @@ public class FlowPathBuilderTest {
assertEquals(pathId, flowPath.getPathId());
assertEquals(meterId, flowPath.getMeterId());
assertEquals(cookie, flowPath.getCookie());
- assertThat(flowPath.getSegments(), empty());
+ assertEquals(0, flowPath.getSegments().size());
}
@Test
- public void shouldBuildFlowPathFor2SwitchPath() {
- SwitchId switchId1 = new SwitchId(1);
- SwitchId switchId2 = new SwitchId(2);
-
+ public void buildFlowPathFor2SwitchPath() {
Path path = Path.builder()
- .srcSwitchId(switchId1)
- .destSwitchId(switchId2)
- .segments(Collections.singletonList(Segment.builder()
- .srcSwitchId(switchId1)
- .srcPort(1)
- .destSwitchId(switchId2)
- .destPort(2)
- .build()))
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_2)
+ .segments(singletonList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_2, 3)))
.build();
- Flow flow = Flow.builder()
- .flowId("test_flow")
- .srcSwitch(Switch.builder().switchId(switchId1).build())
- .destSwitch(Switch.builder().switchId(switchId2).build())
- .build();
+ Flow flow = buildFlow(SWITCH_1, SWITCH_2);
PathId pathId = new PathId("test_path_id");
MeterId meterId = new MeterId(MeterId.MIN_FLOW_METER_ID);
PathResources pathResources = PathResources.builder()
@@ -277,56 +223,155 @@ public class FlowPathBuilderTest {
FlowPath flowPath = builder.buildFlowPath(flow, pathResources, path, cookie, false, flow.getFlowId());
- assertEquals(switchId1, flowPath.getSrcSwitchId());
- assertEquals(switchId2, flowPath.getDestSwitchId());
+ assertEquals(SWITCH_ID_1, flowPath.getSrcSwitchId());
+ assertEquals(SWITCH_ID_2, flowPath.getDestSwitchId());
assertEquals(pathId, flowPath.getPathId());
assertEquals(meterId, flowPath.getMeterId());
assertEquals(cookie, flowPath.getCookie());
- assertThat(flowPath.getSegments(), hasSize(1));
+ assertEquals(1, flowPath.getSegments().size());
}
@Test
- public void shouldBuildFlowPathFor3SwitchPath() {
- SwitchId switchId1 = new SwitchId(1);
- SwitchId switchId2 = new SwitchId(2);
- SwitchId switchId3 = new SwitchId(3);
-
+ public void buildFlowPathFor3SwitchPath() {
Path path = Path.builder()
- .srcSwitchId(switchId1)
- .destSwitchId(switchId2)
- .segments(asList(Segment.builder()
- .srcSwitchId(switchId1)
- .srcPort(1)
- .destSwitchId(switchId3)
- .destPort(2)
- .build(), Segment.builder()
- .srcSwitchId(switchId3)
- .srcPort(1)
- .destSwitchId(switchId2)
- .destPort(2)
- .build()))
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_2)
+ .segments(asList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_3, 2),
+ buildSegment(SWITCH_ID_3, 1, SWITCH_ID_2, 2)))
.build();
- Flow flow = Flow.builder()
- .flowId("test_flow")
- .srcSwitch(Switch.builder().switchId(switchId1).build())
- .destSwitch(Switch.builder().switchId(switchId2).build())
- .build();
- PathId pathId = new PathId("test_path_id");
+ Flow flow = buildFlow(SWITCH_1, SWITCH_2);
MeterId meterId = new MeterId(MeterId.MIN_FLOW_METER_ID);
PathResources pathResources = PathResources.builder()
- .pathId(pathId)
+ .pathId(PATH_ID_1)
.meterId(meterId)
.build();
FlowSegmentCookie cookie = new FlowSegmentCookie(FlowPathDirection.FORWARD, 1);
FlowPath flowPath = builder.buildFlowPath(flow, pathResources, path, cookie, false, flow.getFlowId());
- assertEquals(switchId1, flowPath.getSrcSwitchId());
- assertEquals(switchId2, flowPath.getDestSwitchId());
- assertEquals(pathId, flowPath.getPathId());
+ assertEquals(SWITCH_ID_1, flowPath.getSrcSwitchId());
+ assertEquals(SWITCH_ID_2, flowPath.getDestSwitchId());
+ assertEquals(PATH_ID_1, flowPath.getPathId());
assertEquals(meterId, flowPath.getMeterId());
assertEquals(cookie, flowPath.getCookie());
- assertThat(flowPath.getSegments(), hasSize(2));
+ assertEquals(2, flowPath.getSegments().size());
+ }
+
+ @Test
+ public void flowPathsOverlapped() {
+ Path path = Path.builder()
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_3)
+ .segments(asList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_2, 2),
+ buildSegment(SWITCH_ID_2, 3, SWITCH_ID_2, 4)))
+ .build();
+
+ FlowPath flowPath = FlowPath.builder()
+ .srcSwitch(SWITCH_1)
+ .destSwitch(SWITCH_2)
+ .pathId(PATH_ID_1)
+ .segments(ImmutableList.of(buildPathSegment(PATH_ID_1, SWITCH_1, 1, SWITCH_2, 2)))
+ .build();
+
+ assertTrue(builder.arePathsOverlapped(path, flowPath));
+ }
+
+ @Test
+ public void flowPathsNotOverlapped() {
+ Path path = Path.builder()
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_3)
+ .segments(asList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_2, 2),
+ buildSegment(SWITCH_ID_2, 3, SWITCH_ID_2, 4)))
+ .build();
+
+ FlowPath flowPath = FlowPath.builder()
+ .srcSwitch(SWITCH_1)
+ .destSwitch(SWITCH_2)
+ .pathId(PATH_ID_1)
+ .segments(ImmutableList.of(buildPathSegment(PATH_ID_1, SWITCH_1, 7, SWITCH_2, 8)))
+ .build();
+
+ assertFalse(builder.arePathsOverlapped(path, flowPath));
+ }
+
+ @Test
+ public void haFlowPathsOverlapped() {
+ Path path = Path.builder()
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_3)
+ .segments(asList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_2, 2),
+ buildSegment(SWITCH_ID_2, 3, SWITCH_ID_2, 4)))
+ .build();
+
+ HaPath haPath = HaPath.builder().sharedSwitchId(SWITCH_ID_1).yPointSwitchId(SWITCH_ID_2)
+ .subPaths(singletonList(path)).build();
+ FlowPath subPath = FlowPath.builder()
+ .srcSwitch(SWITCH_1)
+ .destSwitch(SWITCH_2)
+ .pathId(PATH_ID_1)
+ .segments(singletonList(buildPathSegment(PATH_ID_1, SWITCH_1, 1, SWITCH_2, 2)))
+ .build();
+ HaSubFlow haSubFlow = HaSubFlow.builder().haSubFlowId(HA_SUB_FLOW).endpointSwitch(SWITCH_2).build();
+ subPath.setHaSubFlow(haSubFlow);
+
+ HaFlowPath haFlowPath = HaFlowPath.builder().sharedSwitch(SWITCH_1).haPathId(PATH_ID_1).build();
+ haFlowPath.setSubPaths(singletonList(subPath));
+ assertTrue(builder.arePathsOverlapped(haPath, haFlowPath));
+ }
+
+ @Test
+ public void haFlowPathsNotOverlapped() {
+ Path path = Path.builder()
+ .srcSwitchId(SWITCH_ID_1)
+ .destSwitchId(SWITCH_ID_3)
+ .segments(asList(buildSegment(SWITCH_ID_1, 1, SWITCH_ID_2, 2),
+ buildSegment(SWITCH_ID_2, 3, SWITCH_ID_2, 4)))
+ .build();
+
+ HaPath haPath = HaPath.builder().sharedSwitchId(SWITCH_ID_1).yPointSwitchId(SWITCH_ID_2)
+ .subPaths(singletonList(path)).build();
+ FlowPath subPath = FlowPath.builder()
+ .srcSwitch(SWITCH_1)
+ .destSwitch(SWITCH_2)
+ .pathId(PATH_ID_1)
+ .segments(singletonList(buildPathSegment(PATH_ID_1, SWITCH_1, 8, SWITCH_2, 9)))
+ .build();
+ HaSubFlow haSubFlow = HaSubFlow.builder().haSubFlowId(HA_SUB_FLOW).endpointSwitch(SWITCH_2).build();
+ subPath.setHaSubFlow(haSubFlow);
+
+ HaFlowPath haFlowPath = HaFlowPath.builder().sharedSwitch(SWITCH_1).haPathId(PATH_ID_1).build();
+ haFlowPath.setSubPaths(singletonList(subPath));
+
+ assertFalse(builder.arePathsOverlapped(haPath, haFlowPath));
+ }
+
+ private static Flow buildFlow(Switch srcSwitch, Switch dstSwitch) {
+ return Flow.builder()
+ .flowId("test_flow")
+ .srcSwitch(srcSwitch)
+ .destSwitch(dstSwitch)
+ .build();
+ }
+
+ private static PathSegment buildPathSegment(
+ PathId flowPathId, Switch srcSwitch, int srcPort, Switch dstSwitch, int dstPort) {
+ return PathSegment.builder()
+ .pathId(flowPathId)
+ .srcSwitch(srcSwitch)
+ .srcPort(srcPort)
+ .destSwitch(dstSwitch)
+ .destPort(dstPort)
+ .build();
+ }
+
+ private static Segment buildSegment(SwitchId srcSwitchId, int srcPort, SwitchId dstSwitchId, int dstPort) {
+ return Segment.builder()
+ .srcSwitchId(srcSwitchId)
+ .srcPort(srcPort)
+ .destSwitchId(dstSwitchId)
+ .destPort(dstPort)
+ .build();
}
}
diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/create/actions/ResourcesAllocationAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/create/actions/ResourcesAllocationAction.java
index 0d62bfbbd..6267af024 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/create/actions/ResourcesAllocationAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/create/actions/ResourcesAllocationAction.java
@@ -257,7 +257,14 @@ public class ResourcesAllocationAction extends
GetHaPathsResult protectedPaths = pathComputer.getHaPath(tmpFlow, true);
stateMachine.setBackUpProtectedPathComputationWayUsed(protectedPaths.isBackUpPathComputationWayUsed());
- //TODO check protected overlapping?
+ boolean overlappingProtectedPathFound =
+ flowPathBuilder.arePathsOverlapped(protectedPaths.getForward(), tmpFlow.getForwardPath())
+ || flowPathBuilder.arePathsOverlapped(protectedPaths.getReverse(), tmpFlow.getReversePath());
+ if (overlappingProtectedPathFound) {
+ log.info("Couldn't find non overlapping protected path. Result ha-flow state: {}", tmpFlow);
+ throw new UnroutableFlowException("Couldn't find non overlapping protected path", tmpFlow.getHaFlowId());
+ }
+
log.debug("Creating the protected path {} for flow {}", protectedPaths, tmpFlow);
transactionManager.doInTransaction(() -> {
diff --git a/src-java/kilda-persistence-orientdb/src/main/java/org/openkilda/persistence/orientdb/repositories/OrientDbFlowPathRepository.java b/src-java/kilda-persistence-orientdb/src/main/java/org/openkilda/persistence/orientdb/repositories/OrientDbFlowPathRepository.java
index 60762fc0b..5e5791e32 100644
--- a/src-java/kilda-persistence-orientdb/src/main/java/org/openkilda/persistence/orientdb/repositories/OrientDbFlowPathRepository.java
+++ b/src-java/kilda-persistence-orientdb/src/main/java/org/openkilda/persistence/orientdb/repositories/OrientDbFlowPathRepository.java
@@ -20,6 +20,8 @@ import static java.lang.String.format;
import org.openkilda.model.PathId;
import org.openkilda.persistence.ferma.frames.FlowFrame;
import org.openkilda.persistence.ferma.frames.FlowPathFrame;
+import org.openkilda.persistence.ferma.frames.HaFlowFrame;
+import org.openkilda.persistence.ferma.frames.HaFlowPathFrame;
import org.openkilda.persistence.ferma.frames.converters.PathIdConverter;
import org.openkilda.persistence.ferma.repositories.FermaFlowPathRepository;
import org.openkilda.persistence.orientdb.OrientDbPersistenceImplementation;
@@ -28,6 +30,8 @@ import org.openkilda.persistence.repositories.FlowPathRepository;
import org.apache.tinkerpop.gremlin.orientdb.executor.OGremlinResultSet;
import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
import java.util.stream.Collectors;
/**
@@ -43,12 +47,18 @@ public class OrientDbFlowPathRepository extends FermaFlowPathRepository {
@Override
public Collection<PathId> findPathIdsByFlowDiverseGroupId(String flowDiverseGroupId) {
- return findPathIdsByFlowGroupId(FlowFrame.DIVERSE_GROUP_ID_PROPERTY, flowDiverseGroupId);
+ Set<PathId> pathIds = new HashSet<>(
+ findPathIdsByFlowGroupId(FlowFrame.DIVERSE_GROUP_ID_PROPERTY, flowDiverseGroupId));
+ pathIds.addAll(findPathIdsByHaFlowGroupId(HaFlowFrame.DIVERSE_GROUP_ID_PROPERTY, flowDiverseGroupId));
+ return pathIds;
}
@Override
public Collection<PathId> findPathIdsByFlowAffinityGroupId(String flowAffinityGroupId) {
- return findPathIdsByFlowGroupId(FlowFrame.AFFINITY_GROUP_ID_PROPERTY, flowAffinityGroupId);
+ Set<PathId> pathIds = new HashSet<>(
+ findPathIdsByFlowGroupId(FlowFrame.AFFINITY_GROUP_ID_PROPERTY, flowAffinityGroupId));
+ pathIds.addAll(findPathIdsByHaFlowGroupId(HaFlowFrame.AFFINITY_GROUP_ID_PROPERTY, flowAffinityGroupId));
+ return pathIds;
}
private Collection<PathId> findPathIdsByFlowGroupId(String groupIdProperty, String flowGroupId) {
@@ -63,6 +73,18 @@ public class OrientDbFlowPathRepository extends FermaFlowPathRepository {
}
}
+ private Collection<PathId> findPathIdsByHaFlowGroupId(String groupIdProperty, String flowGroupId) {
+ try (OGremlinResultSet results = graphSupplier.get().querySql(
+ format("SELECT %s FROM %s WHERE in('%s').in('%s').%s = ?",
+ FlowPathFrame.PATH_ID_PROPERTY, FlowPathFrame.FRAME_LABEL,
+ HaFlowPathFrame.OWNS_PATH_EDGE, HaFlowFrame.OWNS_PATHS_EDGE, groupIdProperty), flowGroupId)) {
+ return results.stream()
+ .map(r -> r.getProperty(FlowPathFrame.PATH_ID_PROPERTY))
+ .map(pathId -> PathIdConverter.INSTANCE.toEntityAttribute((String) pathId))
+ .collect(Collectors.toList());
+ }
+ }
+
/*TODO: this need to be reimplemented to work with PathSegmentFrame.SHARED_BANDWIDTH_GROUP_ID_PROPERTY
@Override
protected long getUsedBandwidthBetweenEndpoints(FramedGraph framedGraph,
diff --git a/src-java/kilda-persistence-tinkerpop/src/main/java/org/openkilda/persistence/ferma/repositories/FermaFlowPathRepository.java b/src-java/kilda-persistence-tinkerpop/src/main/java/org/openkilda/persistence/ferma/repositories/FermaFlowPathRepository.java
index 9f0d65a0e..fd35afd44 100644
--- a/src-java/kilda-persistence-tinkerpop/src/main/java/org/openkilda/persistence/ferma/repositories/FermaFlowPathRepository.java
+++ b/src-java/kilda-persistence-tinkerpop/src/main/java/org/openkilda/persistence/ferma/repositories/FermaFlowPathRepository.java
@@ -50,6 +50,7 @@ import org.apache.tinkerpop.gremlin.structure.Column;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -198,7 +199,7 @@ public class FermaFlowPathRepository extends FermaGenericRepository<FlowPath, Fl
@Override
public Collection<PathId> findPathIdsByFlowAffinityGroupId(String flowAffinityGroupId) {
- List<PathId> pathIds = new ArrayList<>(findPathIdsByFlowGroupId(
+ Set<PathId> pathIds = new HashSet<>(findPathIdsByFlowGroupId(
FlowFrame.AFFINITY_GROUP_ID_PROPERTY, flowAffinityGroupId));
pathIds.addAll(findPathIdsByHaFlowGroupId(HaFlowFrame.AFFINITY_GROUP_ID_PROPERTY, flowAffinityGroupId));
return pathIds; | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/haflow/create/actions/ResourcesAllocationAction.java', 'src-java/kilda-persistence-tinkerpop/src/main/java/org/openkilda/persistence/ferma/repositories/FermaFlowPathRepository.java', 'src-java/kilda-persistence-orientdb/src/main/java/org/openkilda/persistence/orientdb/repositories/OrientDbFlowPathRepository.java', 'src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilder.java', 'src-java/base-topology/base-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowPathBuilderTest.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 12,101,173 | 2,529,364 | 308,526 | 3,247 | 4,983 | 1,014 | 93 | 4 | 481 | 67 | 120 | 10 | 1 | 0 | 1970-01-01T00:28:01 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,036 | telstra/open-kilda/5091/5084 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/5084 | https://github.com/telstra/open-kilda/pull/5091 | https://github.com/telstra/open-kilda/pull/5091 | 1 | closes | Flow updated after failed PATCH request | **Steps to reproduce:**
1. Create one switch flow with 0 vlan (or vlans)
```
{
"flow_id": "flow1",
"source": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "2",
"vlan_id": 0
},
"destination": {
"inner_vlan_id": 0,
"port_number": 11,
"switch_id": "2",
"vlan_id": 123
},
"maximum_bandwidth": 10000
}
```
2. Patch the from with `allocate_protected_path: true` and stat vlans
API `PATCH /v2/flows/{flow_id}`
```
{
"allocate_protected_path": true,
"statistics": {
"vlans": [
123, 456
]
}
}
```
**Expected result:**
1. Bad Request
2. Flow would **not** be updated
**Actual Result:**
1. Bad Request
2. Flow has updated stat vlans and status info with error | 2157f1f5c41464c63cbb705cf51fcf0b50e06bae | 085616a2db7dad9f4492636b499d623c6f907d61 | https://github.com/telstra/open-kilda/compare/2157f1f5c41464c63cbb705cf51fcf0b50e06bae...085616a2db7dad9f4492636b499d623c6f907d61 | diff --git a/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java b/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java
index 3ec36f4bd..ecb92f2a2 100644
--- a/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java
+++ b/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java
@@ -587,6 +587,26 @@ public class FlowOperationsService {
+ "destination vlan_id to zero");
}
}
+
+ if (isProtectedPathNeedToBeAllocated(flowPatch, flow) && isOneSwitchFlow(flowPatch, flow)) {
+ throw new IllegalArgumentException("Can not allocate protected path for one switch flow");
+ }
+ }
+
+ private boolean isProtectedPathNeedToBeAllocated(FlowPatch flowPatch, Flow flow) {
+ if (flowPatch.getAllocateProtectedPath() == null) {
+ return flow.isAllocateProtectedPath();
+ } else {
+ return flowPatch.getAllocateProtectedPath();
+ }
+ }
+
+ private boolean isOneSwitchFlow(FlowPatch patch, Flow flow) {
+ SwitchId srcSwitchId = Optional.ofNullable(patch.getSource()).map(PatchEndpoint::getSwitchId)
+ .orElse(flow.getSrcSwitchId());
+ SwitchId dstSwitchId = Optional.ofNullable(patch.getDestination()).map(PatchEndpoint::getSwitchId)
+ .orElse(flow.getDestSwitchId());
+ return srcSwitchId.equals(dstSwitchId);
}
private boolean isResultingVlanValueIsZero(PatchEndpoint patchEndpoint, int flowOuterVlan) {
diff --git a/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java b/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java
index 9b5497807..92e37b6d4 100644
--- a/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java
+++ b/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java
@@ -50,6 +50,7 @@ import org.openkilda.wfm.topology.nbworker.bolts.FlowOperationsCarrier;
import org.openkilda.wfm.topology.nbworker.services.FlowOperationsService.UpdateFlowResult;
import com.google.common.collect.Sets;
+import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -75,12 +76,19 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
public static final SwitchId SWITCH_ID_3 = new SwitchId(3);
public static final SwitchId SWITCH_ID_4 = new SwitchId(4);
public static final int VLAN_1 = 1;
+ public static final int PORT_1 = 1;
+ public static final int PORT_2 = 2;
private static FlowOperationsService flowOperationsService;
private static FlowRepository flowRepository;
private static FlowPathRepository flowPathRepository;
private static PathSegmentRepository pathSegmentRepository;
private static SwitchRepository switchRepository;
+
+ private Switch switchA;
+ private Switch switchB;
+ private Switch switchC;
+ private Switch switchD;
@BeforeClass
public static void setUpOnce() {
@@ -92,8 +100,16 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
persistenceManager.getTransactionManager());
}
+ @Before
+ public void init() {
+ switchA = createSwitch(SWITCH_ID_1);
+ switchB = createSwitch(SWITCH_ID_2);
+ switchC = createSwitch(SWITCH_ID_3);
+ switchD = createSwitch(SWITCH_ID_4);
+ }
+
@Test
- public void shouldUpdateMaxLatencyPriorityAndPinnedFlowFields() throws FlowNotFoundException {
+ public void updateMaxLatencyPriorityAndPinnedFlowFieldsTest() throws FlowNotFoundException {
String testFlowId = "flow_id";
Long maxLatency = 555L;
Integer priority = 777;
@@ -102,10 +118,10 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
Flow flow = new TestFlowBuilder()
.flowId(testFlowId)
- .srcSwitch(createSwitch(SWITCH_ID_1))
+ .srcSwitch(switchA)
.srcPort(1)
.srcVlan(10)
- .destSwitch(createSwitch(SWITCH_ID_2))
+ .destSwitch(switchB)
.destPort(2)
.destVlan(11)
.encapsulationType(FlowEncapsulationType.TRANSIT_VLAN)
@@ -150,8 +166,8 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
Flow flow = new TestFlowBuilder()
.flowId(testFlowId)
- .srcSwitch(createSwitch(SWITCH_ID_1))
- .destSwitch(createSwitch(SWITCH_ID_2))
+ .srcSwitch(switchA)
+ .destSwitch(switchB)
.build();
flowRepository.add(flow);
@@ -170,8 +186,8 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
Flow flow = new TestFlowBuilder()
.flowId(testFlowId)
- .srcSwitch(createSwitch(SWITCH_ID_1))
- .destSwitch(createSwitch(SWITCH_ID_2))
+ .srcSwitch(switchA)
+ .destSwitch(switchB)
.ignoreBandwidth(true)
.build();
flowRepository.add(flow);
@@ -191,8 +207,8 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
Flow flow = new TestFlowBuilder()
.flowId(testFlowId)
- .srcSwitch(createSwitch(SWITCH_ID_1))
- .destSwitch(createSwitch(SWITCH_ID_2))
+ .srcSwitch(switchA)
+ .destSwitch(switchB)
.build();
flowRepository.add(flow);
@@ -207,15 +223,15 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test
- public void shouldUpdateVlanStatistics() throws FlowNotFoundException {
+ public void updateVlanStatisticsTest() throws FlowNotFoundException {
String testFlowId = "flow_id";
Set<Integer> originalVlanStatistics = new HashSet<>();
originalVlanStatistics.add(11);
Flow flow = new TestFlowBuilder()
.flowId(testFlowId)
- .srcSwitch(createSwitch(SWITCH_ID_1))
- .destSwitch(createSwitch(SWITCH_ID_2))
+ .srcSwitch(switchA)
+ .destSwitch(switchB)
.vlanStatistics(originalVlanStatistics)
.build();
flowRepository.add(flow);
@@ -234,28 +250,28 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test
- public void shouldUpdateVlanStatisticsToZeroDstVlanIsZero() throws FlowNotFoundException {
- runShouldUpdateVlanStatisticsToZero(VLAN_1, 0);
+ public void updateVlanStatisticsToZeroDstVlanIsZeroTest() throws FlowNotFoundException {
+ runUpdateVlanStatisticsToZero(VLAN_1, 0);
}
@Test
- public void shouldUpdateVlanStatisticsToZeroSrcVlanIsZero() throws FlowNotFoundException {
- runShouldUpdateVlanStatisticsToZero(0, VLAN_1);
+ public void updateVlanStatisticsToZeroSrcVlanIsZeroTest() throws FlowNotFoundException {
+ runUpdateVlanStatisticsToZero(0, VLAN_1);
}
@Test
- public void shouldUpdateVlanStatisticsToZeroSrcAndVlanAreZero() throws FlowNotFoundException {
- runShouldUpdateVlanStatisticsToZero(0, 0);
+ public void updateVlanStatisticsToZeroSrcAndVlanAreZeroTest() throws FlowNotFoundException {
+ runUpdateVlanStatisticsToZero(0, 0);
}
- private void runShouldUpdateVlanStatisticsToZero(int srcVLan, int dstVlan)
+ private void runUpdateVlanStatisticsToZero(int srcVLan, int dstVlan)
throws FlowNotFoundException {
Set<Integer> originalVlanStatistics = Sets.newHashSet(1, 2, 3);
Flow flow = new TestFlowBuilder()
.flowId(FLOW_ID_1)
- .srcSwitch(createSwitch(SWITCH_ID_1))
+ .srcSwitch(switchA)
.srcVlan(srcVLan)
- .destSwitch(createSwitch(SWITCH_ID_2))
+ .destSwitch(switchB)
.destVlan(dstVlan)
.vlanStatistics(originalVlanStatistics)
.build();
@@ -271,12 +287,12 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test(expected = IllegalArgumentException.class)
- public void shouldNotUpdateVlanStatistics() throws FlowNotFoundException {
+ public void unableToUpdateVlanStatisticsTest() throws FlowNotFoundException {
Flow flow = new TestFlowBuilder()
.flowId(FLOW_ID_1)
- .srcSwitch(createSwitch(SWITCH_ID_1))
+ .srcSwitch(switchA)
.srcVlan(VLAN_1)
- .destSwitch(createSwitch(SWITCH_ID_2))
+ .destSwitch(switchB)
.destVlan(VLAN_1)
.vlanStatistics(new HashSet<>())
.build();
@@ -290,8 +306,98 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
}
+ @Test(expected = IllegalArgumentException.class)
+ public void unableToSetProtectedPathForInitiallyOneSwitchFlowTest() throws FlowNotFoundException {
+ createFlow(FLOW_ID_1, switchA, switchA, false);
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .allocateProtectedPath(true)
+ .build();
+ flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void unableToMakeOneSwitchFlowFromProtectedByUpdatingDstTest() throws FlowNotFoundException {
+ createFlow(FLOW_ID_1, switchA, switchB, true);
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .destination(PatchEndpoint.builder().switchId(SWITCH_ID_1).build())
+ .build();
+ flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void unableToMakeOneSwitchFlowFromProtectedByUpdatingSrcTest() throws FlowNotFoundException {
+ createFlow(FLOW_ID_1, switchA, switchB, true);
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .source(PatchEndpoint.builder().switchId(SWITCH_ID_2).build())
+ .build();
+ flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void unableToMakeProtectedOneSwitchFlowTest() throws FlowNotFoundException {
+ createFlow(FLOW_ID_1, switchA, switchB, false);
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .source(PatchEndpoint.builder().switchId(SWITCH_ID_3).build())
+ .destination(PatchEndpoint.builder().switchId(SWITCH_ID_3).build())
+ .allocateProtectedPath(true)
+ .build();
+ flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ }
+
+ @Test
+ public void ableToMakeProtectedFlowFromMultiSwitchFlowTest() throws FlowNotFoundException {
+ createFlow(FLOW_ID_1, switchA, switchB, false);
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .allocateProtectedPath(true)
+ .build();
+ flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ // no exception expected
+ }
+
+ @Test
+ public void ableToMakeProtectedFlowFromOneSwitchByChangingSrcTest() throws FlowNotFoundException {
+ createFlow(FLOW_ID_1, switchA, switchA, false);
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .source(PatchEndpoint.builder().switchId(SWITCH_ID_2).build())
+ .allocateProtectedPath(true)
+ .build();
+ flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ // no exception expected
+ }
+
@Test
- public void shouldPrepareFlowUpdateResultWithChangedStrategy() {
+ public void ableToMakeProtectedFlowFromOneSwitchByChangingDstTest() throws FlowNotFoundException {
+ createFlow(FLOW_ID_1, switchA, switchA, false);
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .destination(PatchEndpoint.builder().switchId(SWITCH_ID_2).build())
+ .allocateProtectedPath(true)
+ .build();
+ flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ // no exception expected
+ }
+
+ @Test
+ public void ableToMakeProtectedFlowFromOneSwitchByChangingSrcAndDstTest() throws FlowNotFoundException {
+ createFlow(FLOW_ID_1, switchA, switchA, false);
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .source(PatchEndpoint.builder().switchId(SWITCH_ID_2).build())
+ .destination(PatchEndpoint.builder().switchId(SWITCH_ID_3).build())
+ .allocateProtectedPath(true)
+ .build();
+ flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ // no exception expected
+ }
+
+ @Test
+ public void prepareFlowUpdateResultWithChangedStrategyTest() {
// given: FlowPatch with COST strategy and Flow with MAX_LATENCY strategy
String flowId = "test_flow_id";
FlowPatch flowDto = FlowPatch.builder()
@@ -314,7 +420,7 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test
- public void shouldPrepareFlowUpdateResultWithChangedMaxLatencyFirstCase() {
+ public void prepareFlowUpdateResultWithChangedMaxLatencyFirstCaseTest() {
// given: FlowPatch with max latency and no strategy and Flow with MAX_LATENCY strategy and no max latency
String flowId = "test_flow_id";
FlowPatch flowDto = FlowPatch.builder()
@@ -336,7 +442,7 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test
- public void shouldPrepareFlowUpdateResultWithChangedMaxLatencySecondCase() {
+ public void prepareFlowUpdateResultWithChangedMaxLatencySecondCaseTest() {
// given: FlowPatch with max latency and MAX_LATENCY strategy
// and Flow with MAX_LATENCY strategy and no max latency
String flowId = "test_flow_id";
@@ -360,7 +466,7 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test
- public void shouldPrepareFlowUpdateResultShouldNotUpdateFirstCase() {
+ public void prepareFlowUpdateResultNotUpdateFirstCaseTest() {
// given: FlowPatch with max latency and MAX_LATENCY strategy
// and Flow with MAX_LATENCY strategy and same max latency
String flowId = "test_flow_id";
@@ -385,7 +491,7 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test
- public void shouldPrepareFlowUpdateResultShouldNotUpdateSecondCase() {
+ public void prepareFlowUpdateResultNotUpdateSecondCaseTest() {
// given: FlowPatch with no max latency and no strategy
// and Flow with MAX_LATENCY strategy and max latency
String flowId = "test_flow_id";
@@ -406,7 +512,7 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test
- public void shouldPrepareFlowUpdateResultWithNeedUpdateFlag() {
+ public void prepareFlowUpdateResultWithNeedUpdateFlagTest() {
String flowId = "test_flow_id";
Flow flow = Flow.builder()
.flowId(flowId)
@@ -521,10 +627,6 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
@Test
public void getFlowsForEndpointNotReturnFlowsForOrphanedPaths() throws SwitchNotFoundException {
- Switch switchA = createSwitch(SWITCH_ID_1);
- Switch switchB = createSwitch(SWITCH_ID_2);
- Switch switchC = createSwitch(SWITCH_ID_3);
- Switch switchD = createSwitch(SWITCH_ID_4);
Flow flow = createFlow(FLOW_ID_1, switchA, 1, switchC, 2, FORWARD_PATH_1, REVERSE_PATH_1, switchB);
createOrphanFlowPaths(flow, switchA, 1, switchC, 2, FORWARD_PATH_3, REVERSE_PATH_3, switchD);
assertEquals(0, flowOperationsService.getFlowsForEndpoint(switchD.getSwitchId(), null).size());
@@ -532,7 +634,6 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
@Test
public void getFlowsForEndpointOneSwitchFlowNoPortTest() throws SwitchNotFoundException {
- Switch switchA = createSwitch(SWITCH_ID_1);
createFlow(FLOW_ID_1, switchA, 1, switchA, 2, FORWARD_PATH_1, REVERSE_PATH_1, null);
assertFlows(flowOperationsService.getFlowsForEndpoint(SWITCH_ID_1, null), FLOW_ID_1);
@@ -543,8 +644,6 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
@Test
public void getFlowsForEndpointMultiSwitchFlowNoPortTest() throws SwitchNotFoundException {
- Switch switchA = createSwitch(SWITCH_ID_1);
- Switch switchB = createSwitch(SWITCH_ID_2);
createFlow(FLOW_ID_1, switchA, 1, switchB, 2, FORWARD_PATH_1, REVERSE_PATH_1, null);
assertFlows(flowOperationsService.getFlowsForEndpoint(SWITCH_ID_1, null), FLOW_ID_1);
@@ -555,9 +654,6 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
@Test
public void getFlowsForEndpointTransitSwitchFlowNoPortTest() throws SwitchNotFoundException {
- Switch switchA = createSwitch(SWITCH_ID_1);
- Switch switchB = createSwitch(SWITCH_ID_2);
- Switch switchC = createSwitch(SWITCH_ID_3);
createFlow(FLOW_ID_1, switchA, 1, switchC, 2, FORWARD_PATH_1, REVERSE_PATH_1, switchB);
assertFlows(flowOperationsService.getFlowsForEndpoint(SWITCH_ID_2, null), FLOW_ID_1);
@@ -568,9 +664,6 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
@Test
public void getFlowsForEndpointSeveralFlowNoPortTest() throws SwitchNotFoundException {
- Switch switchA = createSwitch(SWITCH_ID_1);
- Switch switchB = createSwitch(SWITCH_ID_2);
- Switch switchC = createSwitch(SWITCH_ID_3);
// one switch flow
createFlow(FLOW_ID_1, switchB, 1, switchB, 2, FORWARD_PATH_1, REVERSE_PATH_1, null);
assertFlows(flowOperationsService.getFlowsForEndpoint(SWITCH_ID_2, null), FLOW_ID_1);
@@ -586,7 +679,6 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
@Test
public void getFlowsForEndpointOneSwitchFlowWithPortTest() throws SwitchNotFoundException {
- Switch switchA = createSwitch(SWITCH_ID_1);
createFlow(FLOW_ID_1, switchA, 1, switchA, 2, FORWARD_PATH_1, REVERSE_PATH_1, null);
assertFlows(flowOperationsService.getFlowsForEndpoint(SWITCH_ID_1, 1), FLOW_ID_1);
@@ -598,8 +690,6 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
@Test
public void getFlowsForEndpointMultiSwitchFlowWithPortTest() throws SwitchNotFoundException {
- Switch switchA = createSwitch(SWITCH_ID_1);
- Switch switchB = createSwitch(SWITCH_ID_2);
createFlow(FLOW_ID_1, switchA, 1, switchB, 2, FORWARD_PATH_1, REVERSE_PATH_1, null);
assertFlows(flowOperationsService.getFlowsForEndpoint(SWITCH_ID_1, 1), FLOW_ID_1);
@@ -610,9 +700,6 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
@Test
public void getFlowsForEndpointTransitSwitchFlowWithPortTest() throws SwitchNotFoundException {
- Switch switchA = createSwitch(SWITCH_ID_1);
- Switch switchB = createSwitch(SWITCH_ID_2);
- Switch switchC = createSwitch(SWITCH_ID_3);
createFlow(FLOW_ID_1, switchA, 1, switchC, 2, FORWARD_PATH_1, REVERSE_PATH_1, switchB);
assertFlows(flowOperationsService.getFlowsForEndpoint(SWITCH_ID_2, 2), FLOW_ID_1);
@@ -674,8 +761,19 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
flow.addPaths(forwardPath, reversePath);
}
+ private void createFlow(String flowId, Switch srcSwitch, Switch dstSwitch, Boolean protectedPath) {
+ createFlow(flowId, srcSwitch, PORT_1, dstSwitch, PORT_2,
+ FORWARD_PATH_1, REVERSE_PATH_1, null, protectedPath);
+ }
+
private Flow createFlow(String flowId, Switch srcSwitch, int srcPort, Switch dstSwitch, int dstPort,
PathId forwardPartId, PathId reversePathId, Switch transitSwitch) {
+ return createFlow(
+ flowId, srcSwitch, srcPort, dstSwitch, dstPort, forwardPartId, reversePathId, transitSwitch, false);
+ }
+
+ private Flow createFlow(String flowId, Switch srcSwitch, int srcPort, Switch dstSwitch, int dstPort,
+ PathId forwardPartId, PathId reversePathId, Switch transitSwitch, boolean protectedPath) {
Flow flow = Flow.builder()
.flowId(flowId)
@@ -684,9 +782,9 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
.destSwitch(dstSwitch)
.destPort(dstPort)
.status(FlowStatus.UP)
+ .allocateProtectedPath(protectedPath)
.build();
-
FlowPath forwardPath = FlowPath.builder()
.pathId(forwardPartId)
.srcSwitch(srcSwitch) | ['src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java', 'src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 11,639,604 | 2,431,756 | 298,185 | 3,169 | 950 | 196 | 20 | 1 | 759 | 97 | 236 | 41 | 0 | 2 | 1970-01-01T00:27:56 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,067 | telstra/open-kilda/3987/3960 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/3960 | https://github.com/telstra/open-kilda/pull/3987 | https://github.com/telstra/open-kilda/pull/3987 | 1 | close | Flow loops: Installing the exact same loop twice leads to deletion of flow loop rules | 1. Create a flow
2. Create a flow loop on src
3. Repeat the same request from previous step
**Expected:** Flow loop rules are just re-installed
**Actual:** Flow loop rules are removed. Switch validation shows missing rules
test: `"Attempt to create the exact same flowLoop twice just re-installs the rules"` | 814b3510fa9701ae75c90ed64b4f2aaa29319adf | 24a912c37f6c16dfb0599466b189a936994a0044 | https://github.com/telstra/open-kilda/compare/814b3510fa9701ae75c90ed64b4f2aaa29319adf...24a912c37f6c16dfb0599466b189a936994a0044 | diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/UpdateFlowAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/UpdateFlowAction.java
index 922cc4d67..8b820d7b1 100644
--- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/UpdateFlowAction.java
+++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/UpdateFlowAction.java
@@ -234,10 +234,10 @@ public class UpdateFlowAction extends NbTrackableAction<FlowUpdateFsm, State, Ev
}
private FlowLoopOperation detectFlowLoopOperation(RequestedFlow originalFlow, RequestedFlow targetFlow) {
- if (originalFlow.getLoopSwitchId() == targetFlow.getLoopSwitchId()) {
+ if (originalFlow.getLoopSwitchId() == null && targetFlow.getLoopSwitchId() == null) {
return FlowLoopOperation.NONE;
}
- if (originalFlow.getLoopSwitchId() != null) {
+ if (targetFlow.getLoopSwitchId() == null) {
return FlowLoopOperation.DELETE;
} else {
return FlowLoopOperation.CREATE; | ['src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/UpdateFlowAction.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 8,285,496 | 1,728,579 | 217,386 | 2,357 | 281 | 71 | 4 | 1 | 314 | 50 | 71 | 7 | 0 | 0 | 1970-01-01T00:26:50 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,038 | telstra/open-kilda/5079/5075 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/5075 | https://github.com/telstra/open-kilda/pull/5079 | https://github.com/telstra/open-kilda/pull/5079 | 1 | closes | flow_id in the request line is ignored while flow update | Steps
case 1
1) create flow (e.g. with flow_id = flow1) - POST [/v2/flows]
2) make **wrong** put request to update it - - PUT [/v2/flows/flow_not_match]
where body of the request contains flow_id = flow1
note: system shouldn't contain any flow with "flow_not_match" id
actual result: flow is updated successfully
expected result: 404 with the corresponding error message like "flow not found"
case 2
1) create flow (e.g. with flow_id = flow1) - POST [/v2/flows]
2) make put request to update it - - PUT [/v2/flows/flow1]
where body of the request **doesn't contains** flow_id = flow1
e.g. body should contain flow_id = flow_not_match
note: system shouldn't contain any flow with "flow_not_match" id
actual result: 404 with the corresponding error message like "flow not found"
expected result: flow should be updated, flow_id from the body should be ignored
actually we are taking flow_id from body, but it should be taken from the request line
also flow_id update is not supported, so it can be set as nullable in the body
| 49dc7cf975b3d44595e49c9329d72df4c435f87b | 97c4998939f7c99be379a7efc5586cb107a3f0bf | https://github.com/telstra/open-kilda/compare/49dc7cf975b3d44595e49c9329d72df4c435f87b...97c4998939f7c99be379a7efc5586cb107a3f0bf | diff --git a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v1/FlowController.java b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v1/FlowController.java
index 003911b8f..ad05eef1d 100644
--- a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v1/FlowController.java
+++ b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v1/FlowController.java
@@ -137,7 +137,7 @@ public class FlowController extends BaseController {
@ResponseStatus(HttpStatus.OK)
public CompletableFuture<FlowResponsePayload> updateFlow(@PathVariable(name = "flow-id") String flowId,
@RequestBody FlowUpdatePayload flow) {
- return flowService.updateFlow(flow);
+ return flowService.updateFlow(flowId, flow);
}
/**
diff --git a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v2/FlowControllerV2.java b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v2/FlowControllerV2.java
index a64191910..3b4727345 100644
--- a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v2/FlowControllerV2.java
+++ b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v2/FlowControllerV2.java
@@ -80,7 +80,7 @@ public class FlowControllerV2 extends BaseController {
public CompletableFuture<FlowResponseV2> updateFlow(@PathVariable(name = "flow_id") String flowId,
@RequestBody FlowRequestV2 flow) {
verifyRequest(flow);
- return flowService.updateFlow(flow);
+ return flowService.updateFlow(flowId, flow);
}
/**
diff --git a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/FlowService.java b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/FlowService.java
index 863d75610..6418894c3 100644
--- a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/FlowService.java
+++ b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/FlowService.java
@@ -87,7 +87,7 @@ public interface FlowService {
* @param flow flow
* @return updated flow
*/
- CompletableFuture<FlowResponsePayload> updateFlow(final FlowUpdatePayload flow);
+ CompletableFuture<FlowResponsePayload> updateFlow(final String flowId, final FlowUpdatePayload flow);
/**
* Updates flow.
@@ -95,7 +95,7 @@ public interface FlowService {
* @param flow flow
* @return updated flow
*/
- CompletableFuture<FlowResponseV2> updateFlow(FlowRequestV2 flow);
+ CompletableFuture<FlowResponseV2> updateFlow(final String flowId, FlowRequestV2 flow);
/**
* Patch flow.
diff --git a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java
index 7bb83e6b7..93ba668b4 100644
--- a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java
+++ b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java
@@ -239,8 +239,8 @@ public class FlowServiceImpl implements FlowService {
* {@inheritDoc}
*/
@Override
- public CompletableFuture<FlowResponsePayload> updateFlow(final FlowUpdatePayload request) {
- log.info("API request: Update flow request for flow {}", request);
+ public CompletableFuture<FlowResponsePayload> updateFlow(final String flowId, final FlowUpdatePayload request) {
+ log.info("API request: Update flow request for flow {}: {}", flowId, request);
final String correlationId = RequestCorrelationId.getId();
FlowRequest updateRequest;
@@ -252,6 +252,7 @@ public class FlowServiceImpl implements FlowService {
throw new MessageException(correlationId, System.currentTimeMillis(), ErrorType.DATA_INVALID,
e.getMessage(), "Can not parse arguments of the update flow request");
}
+ validateFlowId(updateRequest.getFlowId(), flowId, correlationId);
CommandMessage command = new CommandMessage(updateRequest,
System.currentTimeMillis(), correlationId, Destination.WFM);
@@ -263,8 +264,8 @@ public class FlowServiceImpl implements FlowService {
}
@Override
- public CompletableFuture<FlowResponseV2> updateFlow(FlowRequestV2 request) {
- log.info("API request: Processing flow update: {}", request);
+ public CompletableFuture<FlowResponseV2> updateFlow(final String flowId, FlowRequestV2 request) {
+ log.info("API request: Update flow request for flow {}: {}", flowId, request);
final String correlationId = RequestCorrelationId.getId();
FlowRequest updateRequest;
@@ -276,6 +277,7 @@ public class FlowServiceImpl implements FlowService {
throw new MessageException(correlationId, System.currentTimeMillis(), ErrorType.DATA_INVALID,
e.getMessage(), "Can not parse arguments of the update flow request");
}
+ validateFlowId(updateRequest.getFlowId(), flowId, correlationId);
CommandMessage command = new CommandMessage(updateRequest,
System.currentTimeMillis(), correlationId, Destination.WFM);
@@ -861,4 +863,12 @@ public class FlowServiceImpl implements FlowService {
.map(encoder)
.collect(Collectors.toList()));
}
+
+ private void validateFlowId(String requestFlowId, String pathFlowId, String correlationId) {
+ if (!requestFlowId.equals(pathFlowId)) {
+ throw new MessageException(correlationId, System.currentTimeMillis(), ErrorType.DATA_INVALID,
+ "flow_id from body and from path are different",
+ format("Body flow_id: %s, path flow_id: %s", requestFlowId, pathFlowId));
+ }
+ }
}
diff --git a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/TestConfig.java b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/TestConfig.java
index d957ca40a..af5318997 100644
--- a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/TestConfig.java
+++ b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/TestConfig.java
@@ -18,7 +18,7 @@ package org.openkilda.northbound.controller;
import org.openkilda.northbound.config.KafkaConfig;
import org.openkilda.northbound.config.SecurityConfig;
import org.openkilda.northbound.config.WebConfig;
-import org.openkilda.northbound.controller.v1.TestMessageMock;
+import org.openkilda.northbound.controller.mock.TestMessageMock;
import org.openkilda.northbound.messaging.MessagingChannel;
import org.openkilda.northbound.utils.CorrelationIdFactory;
import org.openkilda.northbound.utils.TestCorrelationIdFactory;
diff --git a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/TestMessageMock.java b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/mock/TestMessageMock.java
similarity index 76%
rename from src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/TestMessageMock.java
rename to src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/mock/TestMessageMock.java
index d68c638c2..c65edb3e4 100644
--- a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/TestMessageMock.java
+++ b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/mock/TestMessageMock.java
@@ -13,10 +13,12 @@
* limitations under the License.
*/
-package org.openkilda.northbound.controller.v1;
+package org.openkilda.northbound.controller.mock;
+import static java.lang.String.format;
import static java.util.Collections.singletonList;
import static java.util.concurrent.CompletableFuture.completedFuture;
+import static org.openkilda.messaging.Utils.DEFAULT_CORRELATION_ID;
import static org.openkilda.messaging.error.ErrorType.OPERATION_TIMED_OUT;
import org.openkilda.messaging.Destination;
@@ -30,6 +32,7 @@ import org.openkilda.messaging.command.switches.SwitchRulesDeleteRequest;
import org.openkilda.messaging.error.ErrorData;
import org.openkilda.messaging.error.ErrorMessage;
import org.openkilda.messaging.error.ErrorType;
+import org.openkilda.messaging.error.MessageError;
import org.openkilda.messaging.error.MessageException;
import org.openkilda.messaging.info.InfoData;
import org.openkilda.messaging.info.event.PathInfoData;
@@ -53,6 +56,7 @@ import org.openkilda.messaging.payload.flow.PathNodePayload;
import org.openkilda.model.SwitchId;
import org.openkilda.northbound.dto.v2.flows.DetectConnectedDevicesV2;
import org.openkilda.northbound.dto.v2.flows.FlowEndpointV2;
+import org.openkilda.northbound.dto.v2.flows.FlowRequestV2;
import org.openkilda.northbound.dto.v2.flows.SwapFlowEndpointPayload;
import org.openkilda.northbound.dto.v2.flows.SwapFlowPayload;
import org.openkilda.northbound.messaging.MessagingChannel;
@@ -71,33 +75,42 @@ import java.util.concurrent.ConcurrentHashMap;
*/
@Component
public class TestMessageMock implements MessagingChannel {
- static final String FLOW_ID = "ff:00";
+ public static final String FLOW_ID = "flow_id_1";
static final String SECOND_FLOW_ID = "second_flow";
- static final SwitchId SWITCH_ID = new SwitchId(FLOW_ID);
+ public static final String FLOW_ID_FROM_PATH = "different_flow_id";
+ static final SwitchId SWITCH_ID = new SwitchId("ff:00");
static final SwitchId SECOND_SWITCH_ID = new SwitchId("ff:01");
- static final String ERROR_FLOW_ID = "error-flow";
- static final String TEST_SWITCH_ID = "ff:01";
- static final long TEST_SWITCH_RULE_COOKIE = 1L;
- static final FlowEndpointPayload flowEndpoint = new FlowEndpointPayload(SWITCH_ID, 1, 1,
+ public static final String ERROR_FLOW_ID = "error-flow";
+ public static final String TEST_SWITCH_ID = "ff:01";
+ public static final long TEST_SWITCH_RULE_COOKIE = 1L;
+ public static final FlowEndpointPayload FLOW_ENDPOINT = new FlowEndpointPayload(SWITCH_ID, 1, 1,
new DetectConnectedDevicesPayload(false, false));
static final FlowEndpointPayload secondFlowEndpoint = new FlowEndpointPayload(SECOND_SWITCH_ID, 2, 2,
new DetectConnectedDevicesPayload(false, false));
- static final FlowEndpointV2 flowPayloadEndpoint = new FlowEndpointV2(SWITCH_ID, 1, 1,
+ public static final FlowEndpointV2 FLOW_PAYLOAD_ENDPOINT = new FlowEndpointV2(SWITCH_ID, 1, 1,
new DetectConnectedDevicesV2(false, false));
static final FlowEndpointV2 secondFlowPayloadEndpoint = new FlowEndpointV2(SECOND_SWITCH_ID, 2, 2,
new DetectConnectedDevicesV2(false, false));
- public static final FlowPayload flow = FlowPayload.builder()
+
+ public static final FlowPayload FLOW = FlowPayload.builder()
.id(FLOW_ID)
- .source(flowEndpoint)
- .destination(flowEndpoint)
+ .source(FLOW_ENDPOINT)
+ .destination(FLOW_ENDPOINT)
.maximumBandwidth(10000)
.description(FLOW_ID)
.status(FlowState.UP.getState())
.build();
- public static final FlowResponsePayload flowResponsePayload = FlowResponsePayload.flowResponsePayloadBuilder()
+
+ public static final FlowRequestV2 FLOW_REQUEST_V2 = FlowRequestV2.builder()
+ .flowId(FLOW_ID)
+ .source(FLOW_PAYLOAD_ENDPOINT)
+ .destination(FLOW_PAYLOAD_ENDPOINT)
+ .build();
+
+ public static final FlowResponsePayload FLOW_RESPONSE_PAYLOAD = FlowResponsePayload.flowResponsePayloadBuilder()
.id(FLOW_ID)
- .source(flowEndpoint)
- .destination(flowEndpoint)
+ .source(FLOW_ENDPOINT)
+ .destination(FLOW_ENDPOINT)
.maximumBandwidth(10000)
.description(FLOW_ID)
.status(FlowState.UP.getState())
@@ -105,8 +118,8 @@ public class TestMessageMock implements MessagingChannel {
public static final SwapFlowPayload firstSwapFlow = SwapFlowPayload.builder()
.flowId(FLOW_ID)
- .source(flowPayloadEndpoint)
- .destination(flowPayloadEndpoint)
+ .source(FLOW_PAYLOAD_ENDPOINT)
+ .destination(FLOW_PAYLOAD_ENDPOINT)
.build();
public static final SwapFlowPayload secondSwapFlow = SwapFlowPayload.builder()
@@ -116,10 +129,10 @@ public class TestMessageMock implements MessagingChannel {
.build();
public static final SwapFlowEndpointPayload bulkFlow = new SwapFlowEndpointPayload(firstSwapFlow, secondSwapFlow);
- static final FlowIdStatusPayload flowStatus = new FlowIdStatusPayload(FLOW_ID, FlowState.UP);
+ public static final FlowIdStatusPayload FLOW_STATUS = new FlowIdStatusPayload(FLOW_ID, FlowState.UP);
static final PathInfoData path = new PathInfoData(0L, Collections.emptyList());
static final List<PathNodePayload> pathPayloadsList = singletonList(new PathNodePayload(SWITCH_ID, 1, 1));
- static final FlowPathPayload flowPath = FlowPathPayload.builder()
+ public static final FlowPathPayload FLOW_PATH = FlowPathPayload.builder()
.id(FLOW_ID)
.forwardPath(pathPayloadsList)
.reversePath(pathPayloadsList)
@@ -151,6 +164,10 @@ public class TestMessageMock implements MessagingChannel {
new SwitchRulesResponse(singletonList(TEST_SWITCH_RULE_COOKIE));
private static final Map<String, CommandData> messages = new ConcurrentHashMap<>();
+ public static final MessageError DIFFERENT_FLOW_ID_ERROR = new MessageError(DEFAULT_CORRELATION_ID, 0,
+ ErrorType.DATA_INVALID.toString(), "flow_id from body and from path are different",
+ format("Body flow_id: %s, path flow_id: %s", FLOW_ID, FLOW_ID_FROM_PATH));
+
/**
* Chooses response by request.
*
@@ -160,7 +177,7 @@ public class TestMessageMock implements MessagingChannel {
private CompletableFuture<InfoData> formatResponse(final String correlationId, final CommandData data) {
CompletableFuture<InfoData> result = new CompletableFuture<>();
if (data instanceof FlowRequest) {
- result.complete(flowResponse);
+ result.complete(buildFlowResponse((FlowRequest) data));
} else if (data instanceof FlowDeleteRequest) {
result.complete(flowResponse);
} else if (data instanceof FlowReadRequest) {
@@ -176,6 +193,22 @@ public class TestMessageMock implements MessagingChannel {
return result;
}
+ private FlowResponse buildFlowResponse(FlowRequest flowRequest) {
+ return new FlowResponse(FlowDto.builder()
+ .flowId(flowRequest.getFlowId())
+ .bandwidth(flowRequest.getBandwidth())
+ .description(flowRequest.getDescription())
+ .sourceSwitch(flowRequest.getSource().getSwitchId())
+ .destinationSwitch(flowRequest.getDestination().getSwitchId())
+ .sourcePort(flowRequest.getSource().getPortNumber())
+ .destinationPort(flowRequest.getDestination().getPortNumber())
+ .sourceVlan(flowRequest.getSource().getOuterVlanId())
+ .destinationVlan(flowRequest.getDestination().getOuterVlanId())
+ .meterId(1)
+ .state(FlowState.UP)
+ .build());
+ }
+
@Override
public CompletableFuture<InfoData> sendAndGet(String topic, Message message) {
if ("error-topic".equals(topic)) {
diff --git a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/FlowControllerTest.java b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/FlowControllerTest.java
index 1f3d8ae0b..72c644f10 100644
--- a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/FlowControllerTest.java
+++ b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/FlowControllerTest.java
@@ -20,7 +20,7 @@ import static org.openkilda.messaging.Utils.CORRELATION_ID;
import static org.openkilda.messaging.Utils.DEFAULT_CORRELATION_ID;
import static org.openkilda.messaging.Utils.EXTRA_AUTH;
import static org.openkilda.messaging.Utils.MAPPER;
-import static org.openkilda.northbound.controller.v1.TestMessageMock.ERROR_FLOW_ID;
+import static org.openkilda.northbound.controller.mock.TestMessageMock.ERROR_FLOW_ID;
import static org.springframework.http.MediaType.APPLICATION_JSON_UTF8_VALUE;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
@@ -37,6 +37,7 @@ import org.openkilda.messaging.payload.flow.FlowIdStatusPayload;
import org.openkilda.messaging.payload.flow.FlowPathPayload;
import org.openkilda.messaging.payload.flow.FlowResponsePayload;
import org.openkilda.northbound.controller.TestConfig;
+import org.openkilda.northbound.controller.mock.TestMessageMock;
import org.openkilda.northbound.utils.RequestCorrelationId;
import com.fasterxml.jackson.core.type.TypeReference;
@@ -90,7 +91,7 @@ public class FlowControllerTest {
MvcResult mvcResult = mockMvc.perform(put("/v1/flows")
.header(CORRELATION_ID, testCorrelationId())
.contentType(APPLICATION_JSON_VALUE)
- .content(MAPPER.writeValueAsString(TestMessageMock.flow)))
+ .content(MAPPER.writeValueAsString(TestMessageMock.FLOW)))
.andReturn();
MvcResult result = mockMvc.perform(asyncDispatch(mvcResult))
@@ -100,7 +101,7 @@ public class FlowControllerTest {
System.out.println("RESPONSE: " + result.getResponse().getContentAsString());
FlowResponsePayload response = MAPPER.readValue(result.getResponse().getContentAsString(),
FlowResponsePayload.class);
- assertEquals(TestMessageMock.flowResponsePayload, response);
+ assertEquals(TestMessageMock.FLOW_RESPONSE_PAYLOAD, response);
}
@Test
@@ -118,7 +119,7 @@ public class FlowControllerTest {
.andReturn();
FlowResponsePayload response = MAPPER.readValue(result.getResponse().getContentAsString(),
FlowResponsePayload.class);
- assertEquals(TestMessageMock.flowResponsePayload, response);
+ assertEquals(TestMessageMock.FLOW_RESPONSE_PAYLOAD, response);
}
@Test
@@ -135,7 +136,7 @@ public class FlowControllerTest {
.andReturn();
FlowResponsePayload response = MAPPER.readValue(result.getResponse().getContentAsString(),
FlowResponsePayload.class);
- assertEquals(TestMessageMock.flowResponsePayload, response);
+ assertEquals(TestMessageMock.FLOW_RESPONSE_PAYLOAD, response);
}
@Test
@@ -153,7 +154,7 @@ public class FlowControllerTest {
.andReturn();
FlowResponsePayload[] response = MAPPER.readValue(result.getResponse().getContentAsString(),
FlowResponsePayload[].class);
- assertEquals(TestMessageMock.flowResponsePayload, response[0]);
+ assertEquals(TestMessageMock.FLOW_RESPONSE_PAYLOAD, response[0]);
}
@Test
@@ -171,7 +172,7 @@ public class FlowControllerTest {
MvcResult mvcResult = mockMvc.perform(put("/v1/flows/{flow-id}", TestMessageMock.FLOW_ID)
.header(CORRELATION_ID, testCorrelationId())
.contentType(APPLICATION_JSON_VALUE)
- .content(MAPPER.writeValueAsString(TestMessageMock.flow)))
+ .content(MAPPER.writeValueAsString(TestMessageMock.FLOW)))
.andReturn();
MvcResult result = mockMvc.perform(asyncDispatch(mvcResult))
@@ -180,9 +181,25 @@ public class FlowControllerTest {
.andReturn();
FlowResponsePayload response = MAPPER.readValue(result.getResponse().getContentAsString(),
FlowResponsePayload.class);
- assertEquals(TestMessageMock.flowResponsePayload, response);
+ assertEquals(TestMessageMock.FLOW_RESPONSE_PAYLOAD, response);
}
+ @Test
+ @WithMockUser(username = USERNAME, password = PASSWORD, roles = ROLE)
+ public void updateFlowDifferentFlowIdInPathFails() throws Exception {
+ MvcResult result = mockMvc.perform(put("/v1/flows/{flow-id}", TestMessageMock.FLOW_ID_FROM_PATH)
+ .header(CORRELATION_ID, testCorrelationId())
+ .contentType(APPLICATION_JSON_VALUE)
+ .content(MAPPER.writeValueAsString(TestMessageMock.FLOW)))
+ .andExpect(status().isBadRequest())
+ .andExpect(content().contentType(APPLICATION_JSON_VALUE))
+ .andReturn();
+
+ MessageError response = MAPPER.readValue(result.getResponse().getContentAsString(), MessageError.class);
+ assertEquals(TestMessageMock.DIFFERENT_FLOW_ID_ERROR, response);
+ }
+
+
@Test
@WithMockUser(username = USERNAME, password = PASSWORD, roles = ROLE)
public void getFlows() throws Exception {
@@ -198,7 +215,7 @@ public class FlowControllerTest {
List<FlowResponsePayload> response = MAPPER.readValue(
result.getResponse().getContentAsString(),
new TypeReference<List<FlowResponsePayload>>() {});
- assertEquals(Collections.singletonList(TestMessageMock.flowResponsePayload), response);
+ assertEquals(Collections.singletonList(TestMessageMock.FLOW_RESPONSE_PAYLOAD), response);
}
@Test
@@ -215,7 +232,7 @@ public class FlowControllerTest {
.andReturn();
FlowIdStatusPayload response =
MAPPER.readValue(result.getResponse().getContentAsString(), FlowIdStatusPayload.class);
- assertEquals(TestMessageMock.flowStatus, response);
+ assertEquals(TestMessageMock.FLOW_STATUS, response);
}
@Test
@@ -231,7 +248,7 @@ public class FlowControllerTest {
.andExpect(content().contentType(APPLICATION_JSON_VALUE))
.andReturn();
FlowPathPayload response = MAPPER.readValue(result.getResponse().getContentAsString(), FlowPathPayload.class);
- assertEquals(TestMessageMock.flowPath, response);
+ assertEquals(TestMessageMock.FLOW_PATH, response);
}
@Test
diff --git a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/SwitchControllerTest.java b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/SwitchControllerTest.java
index e76fad6c0..f659527ab 100644
--- a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/SwitchControllerTest.java
+++ b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/SwitchControllerTest.java
@@ -19,8 +19,8 @@ import static org.junit.Assert.assertEquals;
import static org.openkilda.messaging.Utils.CORRELATION_ID;
import static org.openkilda.messaging.Utils.EXTRA_AUTH;
import static org.openkilda.messaging.Utils.MAPPER;
-import static org.openkilda.northbound.controller.v1.TestMessageMock.TEST_SWITCH_ID;
-import static org.openkilda.northbound.controller.v1.TestMessageMock.TEST_SWITCH_RULE_COOKIE;
+import static org.openkilda.northbound.controller.mock.TestMessageMock.TEST_SWITCH_ID;
+import static org.openkilda.northbound.controller.mock.TestMessageMock.TEST_SWITCH_RULE_COOKIE;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.asyncDispatch;
diff --git a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v2/FlowControllerTest.java b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v2/FlowControllerTest.java
index 94a27c2a4..e1190d142 100644
--- a/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v2/FlowControllerTest.java
+++ b/src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v2/FlowControllerTest.java
@@ -23,11 +23,13 @@ import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.asyncDispatch;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+import org.openkilda.messaging.error.MessageError;
import org.openkilda.northbound.controller.TestConfig;
-import org.openkilda.northbound.controller.v1.TestMessageMock;
+import org.openkilda.northbound.controller.mock.TestMessageMock;
import org.openkilda.northbound.dto.v2.flows.SwapFlowEndpointPayload;
import org.openkilda.northbound.utils.RequestCorrelationId;
@@ -87,6 +89,21 @@ public class FlowControllerTest {
assertEquals(TestMessageMock.bulkFlow.getSecondFlow(), response.getSecondFlow());
}
+ @Test
+ @WithMockUser(username = USERNAME, password = PASSWORD, roles = ROLE)
+ public void updateFlowDifferentFlowIdInPathFails() throws Exception {
+ MvcResult result = mockMvc.perform(put("/v2/flows/{flow-id}", TestMessageMock.FLOW_ID_FROM_PATH)
+ .header(CORRELATION_ID, testCorrelationId())
+ .contentType(APPLICATION_JSON_VALUE)
+ .content(MAPPER.writeValueAsString(TestMessageMock.FLOW_REQUEST_V2)))
+ .andExpect(status().isBadRequest())
+ .andExpect(content().contentType(APPLICATION_JSON_VALUE))
+ .andReturn();
+
+ MessageError response = MAPPER.readValue(result.getResponse().getContentAsString(), MessageError.class);
+ assertEquals(TestMessageMock.DIFFERENT_FLOW_ID_ERROR, response);
+ }
+
private static String testCorrelationId() {
return UUID.randomUUID().toString();
} | ['src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v2/FlowControllerTest.java', 'src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/SwitchControllerTest.java', 'src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v1/FlowController.java', 'src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/TestMessageMock.java', 'src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v2/FlowControllerV2.java', 'src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/FlowService.java', 'src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java', 'src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/TestConfig.java', 'src-java/northbound-service/northbound/src/test/java/org/openkilda/northbound/controller/v1/FlowControllerTest.java'] | {'.java': 9} | 9 | 9 | 0 | 0 | 9 | 11,509,471 | 2,403,454 | 294,496 | 3,136 | 1,865 | 382 | 26 | 4 | 1,055 | 169 | 271 | 24 | 0 | 0 | 1970-01-01T00:27:55 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |
2,040 | telstra/open-kilda/5064/5063 | telstra | open-kilda | https://github.com/telstra/open-kilda/issues/5063 | https://github.com/telstra/open-kilda/pull/5064 | https://github.com/telstra/open-kilda/pull/5064 | 1 | closes | Can't patch flow with empty vlan stats and non zeros src/dst vlans | **Steps to reproduce:**
1. Create a flow with non empty Vlans stats and with 0 src and dst vlans via v2 API `POST /v2/flows`
```json
{
"flow_id": "flow1",
"source": {
"inner_vlan_id": 0,
"port_number": 10,
"switch_id": "00:00:00:00:00:00:00:01",
"vlan_id": 0
},
"destination": {
"inner_vlan_id": 0,
"port_number": 0,
"switch_id": "00:00:00:00:00:00:00:02",
"vlan_id": 0
},
"maximum_bandwidth": 10000,
"statistics": {
"vlans": [
1,2,3
]
}
}
```
2. Patch the flow with set vlans and empty stat vlans `PATCH /v2/flows/{flow_id}`
```json
{
"source": {
"vlan_id": 15
},
"destination": {
"vlan_id": 17
},
"statistics": {
"vlans": []
}
}
```
**Expected result:**
Successful response:
**Actual result:**
`To collect vlan statistics you need to set source or destination vlan_id to zero` | a6cf9493858a2fc0ee18e43954a2bc4b278327af | 2818012ce1ae52b9ea7542f22bdbc60fd61d1378 | https://github.com/telstra/open-kilda/compare/a6cf9493858a2fc0ee18e43954a2bc4b278327af...2818012ce1ae52b9ea7542f22bdbc60fd61d1378 | diff --git a/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java b/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java
index baed2cba6..c1cd1ed1c 100644
--- a/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java
+++ b/src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java
@@ -572,8 +572,7 @@ public class FlowOperationsService {
+ "at the same time");
}
- if ((flow.getVlanStatistics() != null && !flow.getVlanStatistics().isEmpty())
- || (flowPatch.getVlanStatistics() != null && !flowPatch.getVlanStatistics().isEmpty())) {
+ if (!isVlanStatisticsEmpty(flowPatch, flow)) {
boolean zeroResultSrcVlan = isResultingVlanValueIsZero(flowPatch.getSource(), flow.getSrcVlan());
boolean zeroResultDstVlan = isResultingVlanValueIsZero(flowPatch.getDestination(), flow.getDestVlan());
@@ -733,6 +732,13 @@ public class FlowOperationsService {
.collect(Collectors.toSet());
}
+ private static boolean isVlanStatisticsEmpty(FlowPatch flowPatch, Flow flow) {
+ if (flowPatch.getVlanStatistics() != null) {
+ return flowPatch.getVlanStatistics().isEmpty();
+ }
+ return flow.getVlanStatistics() == null || flow.getVlanStatistics().isEmpty();
+ }
+
@Data
@Builder
static class UpdateFlowResult {
diff --git a/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java b/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java
index d8db615fc..2eb4d40f3 100644
--- a/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java
+++ b/src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java
@@ -80,8 +80,10 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
public static final SwitchId SWITCH_ID_3 = new SwitchId(3);
public static final SwitchId SWITCH_ID_4 = new SwitchId(4);
public static final int VLAN_1 = 1;
- public static final int PORT_1 = 1;
- public static final int PORT_2 = 2;
+ public static final int PORT_1 = 2;
+ public static final int PORT_2 = 3;
+ public static final int VLAN_2 = 4;
+ public static final int VLAN_3 = 5;
private static FlowOperationsService flowOperationsService;
private static FlowRepository flowRepository;
@@ -255,28 +257,111 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
}
@Test
- public void updateVlanStatisticsToZeroDstVlanIsZeroTest() throws FlowNotFoundException, InvalidFlowException {
- runUpdateVlanStatisticsToZero(VLAN_1, 0);
+ public void updateVlanStatisticsToZeroOldSrcAndDstVlanAreZeroTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(0, 0, VLAN_1, VLAN_2);
+ }
+
+ @Test
+ public void updateVlanStatisticsToZeroOldDstVlanAreNotZeroTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(VLAN_3, 0, VLAN_1, VLAN_2);
+ }
+
+ @Test
+ public void updateVlanStatisticsToZeroOldSrcVlanAreNotZeroTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(0, VLAN_3, VLAN_1, VLAN_2);
+ }
+
+ @Test
+ public void updateVlanStatisticsToZeroDstVlanIsZeroNewVlansNullTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(VLAN_1, 0, null, null);
}
@Test
- public void updateVlanStatisticsToZeroSrcVlanIsZeroTest() throws FlowNotFoundException, InvalidFlowException {
- runUpdateVlanStatisticsToZero(0, VLAN_1);
+ public void updateVlanStatisticsToZeroSrcVlanIsZeroNewVlansNullTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(0, VLAN_1, null, null);
}
@Test
- public void updateVlanStatisticsToZeroSrcAndVlanAreZeroTest() throws FlowNotFoundException, InvalidFlowException {
- runUpdateVlanStatisticsToZero(0, 0);
+ public void updateVlanStatisticsToZeroSrcAndDstVlanAreZeroNewVlansNullTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(0, 0, null, null);
+ }
+
+ @Test
+ public void updateVlanStatisticsToZeroDstVlanIsZeroNewVlansZerosTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(VLAN_1, 0, 0, 0);
+ }
+
+ @Test
+ public void updateVlanStatisticsToZeroSrcVlanIsZeroNewVlansZerosTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(0, VLAN_1, 0, 0);
+ }
+
+ @Test
+ public void updateVlanStatisticsToZeroSrcAndDstVlanAreZeroNewVlansZerosTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUpdateVlanStatisticsToZeroTest(0, 0, 0, 0);
+ }
+
+ private void runUpdateVlanStatisticsToZeroTest(
+ int oldSrcVlan, int oldDstVlan, Integer newSrcVlan, Integer newDstVLan)
+ throws FlowNotFoundException, InvalidFlowException {
+ Set<Integer> originalVlanStatistics = Sets.newHashSet(1, 2, 3);
+ Flow flow = new TestFlowBuilder()
+ .flowId(FLOW_ID_1)
+ .srcSwitch(switchA)
+ .srcVlan(oldSrcVlan)
+ .destSwitch(switchB)
+ .destVlan(oldDstVlan)
+ .vlanStatistics(originalVlanStatistics)
+ .build();
+ flowRepository.add(flow);
+
+ FlowPatch receivedFlow = FlowPatch.builder()
+ .flowId(FLOW_ID_1)
+ .vlanStatistics(new HashSet<>())
+ .source(buildPathEndpoint(newSrcVlan))
+ .destination(buildPathEndpoint(newDstVLan))
+ .build();
+
+ Flow updatedFlow = flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
+ assertTrue(updatedFlow.getVlanStatistics().isEmpty());
}
@Test(expected = IllegalArgumentException.class)
- public void unableToUpdateVlanStatisticsTest() throws FlowNotFoundException, InvalidFlowException {
+ public void unableToUpdateVlanStatisticsOldVlansSetNewVlansNullTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUnableToUpdateVlanStatisticsTest(VLAN_1, VLAN_2, null, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void unableToUpdateVlanStatisticsOldVlansSetNewVlansSetTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUnableToUpdateVlanStatisticsTest(VLAN_1, VLAN_2, VLAN_2, VLAN_3);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void unableToUpdateVlanStatisticsOldVlansZeroNewVlansSetTest()
+ throws FlowNotFoundException, InvalidFlowException {
+ runUnableToUpdateVlanStatisticsTest(0, 0, VLAN_1, VLAN_2);
+ }
+
+ private void runUnableToUpdateVlanStatisticsTest(
+ int oldSrcVlan, int oldDstVlan, Integer newSrcVlan, Integer newDstVLan)
+ throws FlowNotFoundException, InvalidFlowException {
Flow flow = new TestFlowBuilder()
.flowId(FLOW_ID_1)
.srcSwitch(switchA)
- .srcVlan(VLAN_1)
+ .srcVlan(oldSrcVlan)
.destSwitch(switchB)
- .destVlan(VLAN_1)
+ .destVlan(oldDstVlan)
.vlanStatistics(new HashSet<>())
.build();
flowRepository.add(flow);
@@ -284,6 +369,8 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
FlowPatch receivedFlow = FlowPatch.builder()
.flowId(FLOW_ID_1)
.vlanStatistics(Sets.newHashSet(1, 2, 3))
+ .source(PatchEndpoint.builder().vlanId(newSrcVlan).build())
+ .destination(PatchEndpoint.builder().vlanId(newDstVLan).build())
.build();
flowOperationsService.updateFlow(new FlowCarrierImpl(), receivedFlow);
@@ -916,6 +1003,13 @@ public class FlowOperationsServiceTest extends InMemoryGraphBasedTest {
assertTrue(updatedFlow.getVlanStatistics().isEmpty());
}
+ private static PatchEndpoint buildPathEndpoint(Integer vlan) {
+ if (vlan == null) {
+ return null;
+ }
+ return PatchEndpoint.builder().vlanId(vlan).build();
+ }
+
private static class FlowCarrierImpl implements FlowOperationsCarrier {
@Override
public void emitPeriodicPingUpdate(String flowId, boolean enabled) { | ['src-java/nbworker-topology/nbworker-storm-topology/src/main/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsService.java', 'src-java/nbworker-topology/nbworker-storm-topology/src/test/java/org/openkilda/wfm/topology/nbworker/services/FlowOperationsServiceTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 11,743,552 | 2,453,977 | 300,577 | 3,193 | 556 | 126 | 10 | 1 | 911 | 113 | 302 | 45 | 0 | 2 | 1970-01-01T00:27:55 | 71 | Java | {'Java': 16533357, 'Groovy': 2440542, 'TypeScript': 876184, 'Python': 375764, 'JavaScript': 369015, 'HTML': 366643, 'CSS': 234005, 'C++': 89798, 'Shell': 61998, 'Dockerfile': 30647, 'Makefile': 20530, 'Gherkin': 5609, 'CMake': 4314, 'Jinja': 1187} | Apache License 2.0 |