Dataset columns (name, dtype, and value statistics):

| column | dtype | stats |
|---|---|---|
| id | int64 | 0 to 10.2k |
| text_id | string | lengths 17 to 67 |
| repo_owner | string | 232 distinct values |
| repo_name | string | 295 distinct values |
| issue_url | string | lengths 39 to 89 |
| pull_url | string | lengths 37 to 87 |
| comment_url | string | lengths 37 to 94 |
| links_count | int64 | 1 to 2 |
| link_keyword | string | 12 distinct values |
| issue_title | string | lengths 7 to 197 |
| issue_body | string | lengths 45 to 21.3k |
| base_sha | string | length 40 |
| head_sha | string | length 40 |
| diff_url | string | lengths 120 to 170 |
| diff | string | lengths 478 to 132k |
| changed_files | string | lengths 47 to 2.6k |
| changed_files_exts | string | 22 distinct values |
| changed_files_count | int64 | 1 to 22 |
| java_changed_files_count | int64 | 1 to 22 |
| kt_changed_files_count | int64 | 0 to 0 |
| py_changed_files_count | int64 | 0 to 0 |
| code_changed_files_count | int64 | 1 to 22 |
| repo_symbols_count | int64 | 32.6k to 242M |
| repo_tokens_count | int64 | 6.59k to 49.2M |
| repo_lines_count | int64 | 992 to 6.2M |
| repo_files_without_tests_count | int64 | 12 to 28.1k |
| changed_symbols_count | int64 | 0 to 36.1k |
| changed_tokens_count | int64 | 0 to 6.5k |
| changed_lines_count | int64 | 0 to 561 |
| changed_files_without_tests_count | int64 | 1 to 17 |
| issue_symbols_count | int64 | 45 to 21.3k |
| issue_words_count | int64 | 2 to 1.39k |
| issue_tokens_count | int64 | 13 to 4.47k |
| issue_lines_count | int64 | 1 to 325 |
| issue_links_count | int64 | 0 to 19 |
| issue_code_blocks_count | int64 | 0 to 31 |
| pull_create_at | timestamp[s] |  |
| stars | int64 | 10 to 44.3k |
| language | string | 8 distinct values |
| languages | string | 296 distinct values |
| license | string | 2 distinct values |

Each record below lists its fields in this column order, separated by `|`; multi-line fields such as issue_body and diff span several lines.
528 | spring-cloud/spring-cloud-config/447/445 | spring-cloud | spring-cloud-config | https://github.com/spring-cloud/spring-cloud-config/issues/445 | https://github.com/spring-cloud/spring-cloud-config/pull/447 | https://github.com/spring-cloud/spring-cloud-config/pull/447 | 1 | fixes | NPE in VaultEnvironmentRepository.findOne | When reading a value from vault it is possible there is no value for a given key so we should check for null before trying to convert the key/value to YAML.
```
java.lang.NullPointerException: null
at org.springframework.cloud.config.server.environment.VaultEnvironmentRepository.findOne(VaultEnvironmentRepository.java:94) ~[spring-cloud-config-server-1.2.0.BUILD-SNAPSHOT.jar:1.2.0.BUILD-SNAPSHOT]
at org.springframework.cloud.config.server.environment.EnvironmentEncryptorEnvironmentRepository.findOne(EnvironmentEncryptorEnvironmentRepository.java:53) ~[spring-cloud-config-server-1.2.0.BUILD-SNAPSHOT.jar:1.2.0.BUILD-SNAPSHOT]
at org.springframework.cloud.config.server.environment.EnvironmentController.labelled(EnvironmentController.java:112) ~[spring-cloud-config-server-1.2.0.BUILD-SNAPSHOT.jar:1.2.0.BUILD-SNAPSHOT]
at org.springframework.cloud.config.server.environment.EnvironmentController.defaultLabel(EnvironmentController.java:101) ~[spring-cloud-config-server-1.2.0.BUILD-SNAPSHOT.jar:1.2.0.BUILD-SNAPSHOT]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.7.0_79]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) ~[na:1.7.0_79]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.7.0_79]
at java.lang.reflect.Method.invoke(Method.java:606) ~[na:1.7.0_79]
at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:221) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:136) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:110) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:832) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:743) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:85) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:961) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:895) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:967) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.servlet.FrameworkServlet.doGet(FrameworkServlet.java:858) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at javax.servlet.http.HttpServlet.service(HttpServlet.java:622) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:843) ~[spring-webmvc-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at javax.servlet.http.HttpServlet.service(HttpServlet.java:729) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:292) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52) ~[tomcat-embed-websocket-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.springframework.boot.actuate.autoconfigure.EndpointWebMvcAutoConfiguration$ApplicationContextHeaderFilter.doFilterInternal(EndpointWebMvcAutoConfiguration.java:281) ~[spring-boot-actuator-1.3.6.RELEASE.jar:1.3.6.RELEASE]
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.springframework.boot.actuate.trace.WebRequestTraceFilter.doFilterInternal(WebRequestTraceFilter.java:115) ~[spring-boot-actuator-1.3.6.RELEASE.jar:1.3.6.RELEASE]
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:99) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.springframework.web.filter.HttpPutFormContentFilter.doFilterInternal(HttpPutFormContentFilter.java:87) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:77) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:121) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.springframework.boot.actuate.autoconfigure.MetricsFilter.doFilterInternal(MetricsFilter.java:103) ~[spring-boot-actuator-1.3.6.RELEASE.jar:1.3.6.RELEASE]
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107) ~[spring-web-4.2.7.RELEASE.jar:4.2.7.RELEASE]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:212) ~[tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:106) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:502) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:141) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:79) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:88) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:528) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.coyote.http11.AbstractHttp11Processor.process(AbstractHttp11Processor.java:1099) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.coyote.AbstractProtocol$AbstractConnectionHandler.process(AbstractProtocol.java:670) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1520) [tomcat-embed-core-8.0.36.jar:8.0.36]
at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.run(NioEndpoint.java:1476) [tomcat-embed-core-8.0.36.jar:8.0.36]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) [na:1.7.0_79]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) [na:1.7.0_79]
at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61) [tomcat-embed-core-8.0.36.jar:8.0.36]
at java.lang.Thread.run(Thread.java:745) [na:1.7.0_79]
```
| 8ad7f06c71d93446600a1b5bd129651adfbd363d | 1f0bbddc549294885ebe2f53102332bb5d13492f | https://github.com/spring-cloud/spring-cloud-config/compare/8ad7f06c71d93446600a1b5bd129651adfbd363d...1f0bbddc549294885ebe2f53102332bb5d13492f | diff --git a/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/config/EnvironmentRepositoryConfiguration.java b/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/config/EnvironmentRepositoryConfiguration.java
index 79a34643..3638f0b7 100644
--- a/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/config/EnvironmentRepositoryConfiguration.java
+++ b/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/config/EnvironmentRepositoryConfiguration.java
@@ -32,6 +32,7 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.core.env.ConfigurableEnvironment;
+import org.springframework.web.client.RestTemplate;
/**
* @author Dave Syer
@@ -106,7 +107,7 @@ public class EnvironmentRepositoryConfiguration {
protected static class VaultConfiguration {
@Bean
public EnvironmentRepository environmentRepository(HttpServletRequest request, EnvironmentWatch watch) {
- return new VaultEnvironmentRepository(request, watch);
+ return new VaultEnvironmentRepository(request, watch, new RestTemplate());
}
}
diff --git a/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepository.java b/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepository.java
index 57b4243f..e7e5290a 100644
--- a/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepository.java
+++ b/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepository.java
@@ -61,16 +61,17 @@ public class VaultEnvironmentRepository implements EnvironmentRepository {
@NotEmpty
private String profileSeparator = ",";
- private RestTemplate rest = new RestTemplate();
+ private RestTemplate rest;
//TODO: move to watchState:String on findOne?
private HttpServletRequest request;
private EnvironmentWatch watch;
- public VaultEnvironmentRepository(HttpServletRequest request, EnvironmentWatch watch) {
+ public VaultEnvironmentRepository(HttpServletRequest request, EnvironmentWatch watch, RestTemplate rest) {
this.request = request;
this.watch = watch;
+ this.rest = rest;
}
@Override
@@ -89,13 +90,15 @@ public class VaultEnvironmentRepository implements EnvironmentRepository {
for (String key : keys) {
// read raw 'data' key from vault
String data = read(key);
- // data is in json format of which, yaml is a superset, so parse
- final YamlPropertiesFactoryBean yaml = new YamlPropertiesFactoryBean();
- yaml.setResources(new ByteArrayResource(data.getBytes()));
- Properties properties = yaml.getObject();
-
- if (!properties.isEmpty()) {
- environment.add(new PropertySource("vault:"+key, properties));
+ if (data != null) {
+ // data is in json format of which, yaml is a superset, so parse
+ final YamlPropertiesFactoryBean yaml = new YamlPropertiesFactoryBean();
+ yaml.setResources(new ByteArrayResource(data.getBytes()));
+ Properties properties = yaml.getObject();
+
+ if (!properties.isEmpty()) {
+ environment.add(new PropertySource("vault:" + key, properties));
+ }
}
}
diff --git a/spring-cloud-config-server/src/test/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepositoryTests.java b/spring-cloud-config-server/src/test/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepositoryTests.java
index d213f1b7..a3962d85 100644
--- a/spring-cloud-config-server/src/test/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepositoryTests.java
+++ b/spring-cloud-config-server/src/test/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepositoryTests.java
@@ -1,14 +1,74 @@
package org.springframework.cloud.config.server.environment;
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.Before;
import org.junit.Test;
+import org.mockito.Mockito;
+import org.springframework.cloud.config.environment.Environment;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.mock.web.MockHttpServletRequest;
+import org.springframework.web.client.RestTemplate;
/**
* @author Spencer Gibb
+ * @author Ryan Baxter
*/
public class VaultEnvironmentRepositoryTests {
+
+ @Before
+ public void init() {}
+
@Test
- public void testFindOne() {
- //TODO: implement testFindOne
+ public void testFindOne() throws IOException {
+ MockHttpServletRequest configRequest = new MockHttpServletRequest();
+ configRequest.addHeader("X-CONFIG-TOKEN", "mytoken");
+ RestTemplate rest = Mockito.mock(RestTemplate.class);
+ ResponseEntity<VaultEnvironmentRepository.VaultResponse> myAppResp = Mockito.mock(ResponseEntity.class);
+ Mockito.when(myAppResp.getStatusCode()).thenReturn(HttpStatus.OK);
+ VaultEnvironmentRepository.VaultResponse myAppVaultResp = Mockito.mock(VaultEnvironmentRepository.VaultResponse.class);
+ Mockito.when(myAppVaultResp.getData()).thenReturn("{\\"foo\\":\\"bar\\"}");
+ Mockito.when(myAppResp.getBody()).thenReturn(myAppVaultResp);
+ Mockito.when(rest.exchange(Mockito.eq("http://127.0.0.1:8200/v1/{backend}/{key}"),
+ Mockito.eq(HttpMethod.GET), Mockito.any(HttpEntity.class), Mockito.eq(VaultEnvironmentRepository.VaultResponse.class),
+ Mockito.eq("secret"), Mockito.eq("myapp"))).thenReturn(myAppResp);
+ ResponseEntity<VaultEnvironmentRepository.VaultResponse> appResp = Mockito.mock(ResponseEntity.class);
+ Mockito.when(appResp.getStatusCode()).thenReturn(HttpStatus.OK);
+ VaultEnvironmentRepository.VaultResponse appVaultResp = Mockito.mock(VaultEnvironmentRepository.VaultResponse.class);
+ Mockito.when(appVaultResp.getData()).thenReturn(null);
+ Mockito.when(appResp.getBody()).thenReturn(appVaultResp);
+ Mockito.when(rest.exchange(Mockito.eq("http://127.0.0.1:8200/v1/{backend}/{key}"),
+ Mockito.eq(HttpMethod.GET), Mockito.any(HttpEntity.class), Mockito.eq(VaultEnvironmentRepository.VaultResponse.class),
+ Mockito.eq("secret"), Mockito.eq("application"))).thenReturn(appResp);
+ VaultEnvironmentRepository repo = new VaultEnvironmentRepository(configRequest, new EnvironmentWatch.Default(), rest);
+ Environment e = repo.findOne("myapp", null, null);
+ assertEquals("myapp", e.getName());
+ Map<String,String> result = new HashMap<String,String>();
+ result.put("foo", "bar");
+ assertEquals(result, e.getPropertySources().get(0).getSource());
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void missingConfigToken() throws IOException {
+ MockHttpServletRequest configRequest = new MockHttpServletRequest();
+ RestTemplate rest = Mockito.mock(RestTemplate.class);
+ ResponseEntity<VaultEnvironmentRepository.VaultResponse> myAppResp = Mockito.mock(ResponseEntity.class);
+ Mockito.when(myAppResp.getStatusCode()).thenReturn(HttpStatus.OK);
+ VaultEnvironmentRepository.VaultResponse myAppVaultResp = Mockito.mock(VaultEnvironmentRepository.VaultResponse.class);
+ Mockito.when(myAppVaultResp.getData()).thenReturn("{\\"foo\\":\\"bar\\"}");
+ Mockito.when(myAppResp.getBody()).thenReturn(myAppVaultResp);
+ Mockito.when(rest.exchange(Mockito.eq("http://127.0.0.1:8200/v1/{backend}/{key}"),
+ Mockito.eq(HttpMethod.GET), Mockito.any(HttpEntity.class), Mockito.eq(VaultEnvironmentRepository.VaultResponse.class),
+ Mockito.eq("secret"), Mockito.eq("myapp"))).thenReturn(myAppResp);
+ VaultEnvironmentRepository repo = new VaultEnvironmentRepository(configRequest, new EnvironmentWatch.Default(), rest);
+ repo.findOne("myapp", null, null);
}
} | ['spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/config/EnvironmentRepositoryConfiguration.java', 'spring-cloud-config-server/src/test/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepositoryTests.java', 'spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/VaultEnvironmentRepository.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 210,662 | 43,951 | 6,718 | 66 | 1,244 | 246 | 24 | 2 | 10,076 | 243 | 2,899 | 75 | 0 | 1 | 2016-07-19T13:45:40 | 1,895 | Java | {'Java': 1817085, 'Shell': 65648, 'Ruby': 488, 'Groovy': 140} | Apache License 2.0 |
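The record above comes down to a missing null check: a Vault key can have no value, and the raw response has to be tested before it is handed to the YAML parser. Below is a minimal standalone sketch of that guard, assuming a hypothetical `read(key)` helper in place of the repository's real Vault call; it is an illustration, not the actual Spring Cloud Config source.

```java
import java.util.Properties;

import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.core.io.ByteArrayResource;

class VaultNullGuardSketch {

    // Hypothetical stand-in for the repository's Vault read; it returns null when
    // the key has no value, which is the case that used to trigger the NPE.
    String read(String key) {
        return null;
    }

    Properties loadKey(String key) {
        String data = read(key);
        if (data == null) {
            // Skip the key instead of calling data.getBytes() on a null reference.
            return new Properties();
        }
        // Vault returns JSON, and YAML is a superset of JSON, so parse it as YAML.
        YamlPropertiesFactoryBean yaml = new YamlPropertiesFactoryBean();
        yaml.setResources(new ByteArrayResource(data.getBytes()));
        return yaml.getObject();
    }
}
```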
526 | spring-cloud/spring-cloud-config/1832/1806 | spring-cloud | spring-cloud-config | https://github.com/spring-cloud/spring-cloud-config/issues/1806 | https://github.com/spring-cloud/spring-cloud-config/pull/1832 | https://github.com/spring-cloud/spring-cloud-config/pull/1832 | 1 | fixes | Actuator health check missing details in 3.0.x | **Describe the bug**
Hi, using cloud config client version 3.0.0 and the native profile for configuration, I always get that the configuration property sources could not be loaded and the status is UNKNOWN when calling the actuator health check.
```
"clientConfigServer": {
"status": "UNKNOWN",
"details": {
"error": "no property sources located"
}
}
```
By comparison, on Spring Boot 1.x I was receiving:
```
"configServer": {
"status": "UP",
"propertySources": [
"file:C:\\\\Users\\user\\projects\\tagging.yml",
]
}
```
Here is my configuration:
```
spring:
application:
name: my-app
cloud:
config:
discovery:
enabled: true
serviceId: config-server
failFast: false
retry:
initialInterval: 10000
maxInterval: 60000
maxAttempts: 12
multiplier: 10.1
```
I also see this log line, which should probably report a "CompositePropertySource" but shows a Bootstrap one instead:
`2021-02-23 10:11:15.402 INFO b.c.PropertySourceBootstrapConfiguration : Located property source: [BootstrapPropertySource {name='bootstrapProperties-configClient'},
BootstrapPropertySource {name='bootstrapProperties-file:C:\\Users\\user\\properties.yml'}]` | 351fabfc6cf2fc0dd5a884e4dcf53edf946669e1 | 964238a5f245fb8faf38e8f2dc26469d22054e9d | https://github.com/spring-cloud/spring-cloud-config/compare/351fabfc6cf2fc0dd5a884e4dcf53edf946669e1...964238a5f245fb8faf38e8f2dc26469d22054e9d | diff --git a/spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerConfigDataLoader.java b/spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerConfigDataLoader.java
index 57a36154..0f7e9227 100644
--- a/spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerConfigDataLoader.java
+++ b/spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerConfigDataLoader.java
@@ -57,6 +57,11 @@ import static org.springframework.cloud.config.client.ConfigClientProperties.TOK
public class ConfigServerConfigDataLoader implements ConfigDataLoader<ConfigServerConfigDataResource>, Ordered {
+ /**
+ * PropertySource name for the config client.
+ */
+ public static final String CONFIG_CLIENT_PROPERTYSOURCE_NAME = "configClient";
+
protected final Log logger;
public ConfigServerConfigDataLoader(Log logger) {
@@ -123,7 +128,7 @@ public class ConfigServerConfigDataLoader implements ConfigDataLoader<ConfigServ
}
// the existence of this property source confirms a successful
// response from config server
- composite.add(0, new MapPropertySource("configClient", map));
+ composite.add(0, new MapPropertySource(CONFIG_CLIENT_PROPERTYSOURCE_NAME, map));
try {
return new ConfigData(composite, Option.IGNORE_IMPORTS, Option.IGNORE_PROFILES);
}
@@ -159,7 +164,8 @@ public class ConfigServerConfigDataLoader implements ConfigDataLoader<ConfigServ
throw new IllegalStateException("Could not locate PropertySource and " + reason + ", failing"
+ (errorBody == null ? "" : ": " + errorBody), error);
}
- logger.warn("Could not locate PropertySource (" + resource + "): " + (error != null ? error.getMessage() : errorBody));
+ logger.warn("Could not locate PropertySource (" + resource + "): "
+ + (error != null ? error.getMessage() : errorBody));
return null;
}
diff --git a/spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerHealthIndicator.java b/spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerHealthIndicator.java
index 2a8fde5b..4a6bc91c 100644
--- a/spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerHealthIndicator.java
+++ b/spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerHealthIndicator.java
@@ -18,14 +18,18 @@ package org.springframework.cloud.config.client;
import java.util.ArrayList;
import java.util.List;
+import java.util.stream.Collectors;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.health.Health.Builder;
import org.springframework.core.env.CompositePropertySource;
import org.springframework.core.env.ConfigurableEnvironment;
-import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.PropertySource;
+import static org.springframework.cloud.bootstrap.config.PropertySourceBootstrapConfiguration.BOOTSTRAP_PROPERTY_SOURCE_NAME;
+import static org.springframework.cloud.config.client.ConfigServerConfigDataLoader.CONFIG_CLIENT_PROPERTYSOURCE_NAME;
+import static org.springframework.cloud.config.client.ConfigServerConfigDataLocationResolver.PREFIX;
+
/**
* @author Spencer Gibb
* @author Marcos Barbero
@@ -38,7 +42,7 @@ public class ConfigServerHealthIndicator extends AbstractHealthIndicator {
private long lastAccess = 0;
- private PropertySource<?> cached;
+ private List<PropertySource<?>> cached = new ArrayList<>();
public ConfigServerHealthIndicator(ConfigurableEnvironment environment, ConfigClientHealthProperties properties) {
this.environment = environment;
@@ -47,29 +51,38 @@ public class ConfigServerHealthIndicator extends AbstractHealthIndicator {
@Override
protected void doHealthCheck(Builder builder) {
- PropertySource<?> propertySource = getPropertySource();
- builder.up();
- if (propertySource instanceof CompositePropertySource) {
+ List<PropertySource<?>> propertySources = getPropertySource();
+ if (propertySources.isEmpty()) {
+ builder.unknown();
+ builder.unknown().withDetail("error", "no property sources located");
+ }
+ else {
+ builder.up();
List<String> sources = new ArrayList<>();
- for (PropertySource<?> ps : ((CompositePropertySource) propertySource).getPropertySources()) {
- sources.add(ps.getName());
+ for (PropertySource<?> propertySource : propertySources) {
+
+ if (propertySource instanceof CompositePropertySource) {
+ for (PropertySource<?> ps : ((CompositePropertySource) propertySource).getPropertySources()) {
+ sources.add(ps.getName());
+ }
+ }
+ else if (propertySource != null) {
+ sources.add(propertySource.getName());
+ }
}
builder.withDetail("propertySources", sources);
}
- else if (propertySource != null) {
- builder.withDetail("propertySources", propertySource.toString());
- }
- else {
- builder.unknown().withDetail("error", "no property sources located");
- }
}
- private PropertySource<?> getPropertySource() {
+ private List<PropertySource<?>> getPropertySource() {
long accessTime = System.currentTimeMillis();
if (isCacheStale(accessTime)) {
this.lastAccess = accessTime;
- MutablePropertySources propertySources = this.environment.getPropertySources();
- this.cached = propertySources.get("configClient");
+ this.cached = this.environment.getPropertySources().stream()
+ .filter(p -> p.getName().startsWith(CONFIG_CLIENT_PROPERTYSOURCE_NAME)
+ || p.getName().startsWith(BOOTSTRAP_PROPERTY_SOURCE_NAME + "-")
+ || p.getName().startsWith(PREFIX))
+ .collect(Collectors.toList());
}
return this.cached;
}
diff --git a/spring-cloud-config-client/src/test/java/org/springframework/cloud/config/client/ConfigServerHealthIndicatorTests.java b/spring-cloud-config-client/src/test/java/org/springframework/cloud/config/client/ConfigServerHealthIndicatorTests.java
index c527626f..e04959db 100644
--- a/spring-cloud-config-client/src/test/java/org/springframework/cloud/config/client/ConfigServerHealthIndicatorTests.java
+++ b/spring-cloud-config-client/src/test/java/org/springframework/cloud/config/client/ConfigServerHealthIndicatorTests.java
@@ -17,6 +17,8 @@
package org.springframework.cloud.config.client;
import java.util.Collections;
+import java.util.List;
+import java.util.Map;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@@ -62,13 +64,23 @@ public class ConfigServerHealthIndicatorTests {
@Test
public void testServerUp() {
setupPropertySources();
+ Map<String, Object> details = this.indicator.getHealth(true).getDetails();
+ List<String> propertySources = (List) details.get("propertySources");
+ assertThat(propertySources.contains("bootstrapProperties-test")).isTrue();
+ assertThat(propertySources.contains("configserver:test")).isTrue();
+ assertThat(propertySources.contains("configClient")).isTrue();
+ assertThat(propertySources.size()).isEqualTo(3);
assertThat(this.indicator.health().getStatus()).isEqualTo(Status.UP);
}
protected void setupPropertySources() {
PropertySource<?> source = new MapPropertySource("configClient", Collections.emptyMap());
+ PropertySource<?> configServerSource = new MapPropertySource("configserver:test", Collections.emptyMap());
+ PropertySource<?> bootstrapSource = new MapPropertySource("bootstrapProperties-test", Collections.emptyMap());
MutablePropertySources sources = new MutablePropertySources();
sources.addFirst(source);
+ sources.addFirst(bootstrapSource);
+ sources.addFirst(configServerSource);
doReturn(sources).when(this.environment).getPropertySources();
}
| ['spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerHealthIndicator.java', 'spring-cloud-config-client/src/main/java/org/springframework/cloud/config/client/ConfigServerConfigDataLoader.java', 'spring-cloud-config-client/src/test/java/org/springframework/cloud/config/client/ConfigServerHealthIndicatorTests.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 616,189 | 128,445 | 19,054 | 184 | 2,702 | 547 | 55 | 2 | 1,222 | 124 | 294 | 45 | 0 | 3 | 2021-03-15T20:43:57 | 1,895 | Java | {'Java': 1817085, 'Shell': 65648, 'Ruby': 488, 'Groovy': 140} | Apache License 2.0 |
525 | spring-cloud/spring-cloud-config/2187/2085 | spring-cloud | spring-cloud-config | https://github.com/spring-cloud/spring-cloud-config/issues/2085 | https://github.com/spring-cloud/spring-cloud-config/pull/2187 | https://github.com/spring-cloud/spring-cloud-config/pull/2187 | 1 | fixes | Spring Cloud Vault Health Check Fails | Hey guys hope all is well.
I had a quick question about the health check for config server’s vault backend.
I seem to be having some trouble with it – I’m not able to access health for vault repo without an exception being thrown “No thread-bound request found”.
To illustrate the problem, I connect to vault backend. I created a ScheduledExecutorService that calls health on every HealthIndicator, every 30 seconds. The health check to vault repo fails.
On the other hand, if I curl the actuator health endpoint, the health check to the vault repo succeeds (curl localhost:8888/actuator/health).
I think this may have to do with the ThreadLocal usage of RequestAttributes behind the ObjectProvider<HttpServletRequest> request called from AbstractVaultEnvironmentRepository::getWatchState.
I am using all the latest versions of projects: spring-cloud-config-server 3.1.2, spring-cloud-starter-bootstrap 3.1.2 and spring-boot-starter-actuator 2.6.7.
I’ve attached my simple, repeatable project here: https://github.com/speedo72/spring-cloud-config-vault-health
Thanks for any help.
| 8e4be15ece7418fe0714941ae0f0bfad4ce82063 | 7849124e5b2701b8c5bbad91c358c74065c7fcff | https://github.com/spring-cloud/spring-cloud-config/compare/8e4be15ece7418fe0714941ae0f0bfad4ce82063...7849124e5b2701b8c5bbad91c358c74065c7fcff | diff --git a/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/AbstractVaultEnvironmentRepository.java b/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/AbstractVaultEnvironmentRepository.java
index 3f4e4a30..5fb70bb8 100644
--- a/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/AbstractVaultEnvironmentRepository.java
+++ b/spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/AbstractVaultEnvironmentRepository.java
@@ -25,6 +25,9 @@ import java.util.Properties;
import javax.servlet.http.HttpServletRequest;
import javax.validation.constraints.NotEmpty;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.cloud.config.environment.Environment;
@@ -44,6 +47,8 @@ import static org.springframework.cloud.config.client.ConfigClientProperties.STA
*/
public abstract class AbstractVaultEnvironmentRepository implements EnvironmentRepository, Ordered {
+ private static Log log = LogFactory.getLog(AbstractVaultEnvironmentRepository.class);
+
// TODO: move to watchState:String on findOne?
protected final ObjectProvider<HttpServletRequest> request;
@@ -104,8 +109,14 @@ public abstract class AbstractVaultEnvironmentRepository implements EnvironmentR
private String getWatchState() {
HttpServletRequest servletRequest = this.request.getIfAvailable();
if (servletRequest != null) {
- String state = servletRequest.getHeader(STATE_HEADER);
- return this.watch.watch(state);
+ try {
+ String state = servletRequest.getHeader(STATE_HEADER);
+ return this.watch.watch(state);
+ }
+ catch (IllegalStateException e) {
+ log.debug("Could not get state.", e);
+ return null;
+ }
}
return null;
} | ['spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/AbstractVaultEnvironmentRepository.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 706,247 | 146,623 | 21,682 | 207 | 491 | 97 | 15 | 1 | 1,109 | 146 | 242 | 17 | 1 | 0 | 2022-11-10T19:20:02 | 1,895 | Java | {'Java': 1817085, 'Shell': 65648, 'Ruby': 488, 'Groovy': 140} | Apache License 2.0 |
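The patch in the record above simply wraps the header read in a try/catch so that a health check running outside an HTTP request no longer propagates the "No thread-bound request found" IllegalStateException. A condensed sketch of the guarded access, where the header name is a placeholder for `ConfigClientProperties.STATE_HEADER` and the watch call is omitted:

```java
import javax.servlet.http.HttpServletRequest;

import org.springframework.beans.factory.ObjectProvider;

class WatchStateSketch {

    // Placeholder constant; the real code reads ConfigClientProperties.STATE_HEADER.
    private static final String STATE_HEADER = "X-Config-State";

    private final ObjectProvider<HttpServletRequest> request;

    WatchStateSketch(ObjectProvider<HttpServletRequest> request) {
        this.request = request;
    }

    String getWatchState() {
        HttpServletRequest servletRequest = request.getIfAvailable();
        if (servletRequest == null) {
            return null;
        }
        try {
            return servletRequest.getHeader(STATE_HEADER);
        } catch (IllegalStateException e) {
            // Thrown by the request proxy when no request is bound to the current thread,
            // e.g. a scheduled health check; treat it the same as "no state header".
            return null;
        }
    }
}
```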
65 | webbukkit/dynmap/3990/3982 | webbukkit | dynmap | https://github.com/webbukkit/dynmap/issues/3982 | https://github.com/webbukkit/dynmap/pull/3990 | https://github.com/webbukkit/dynmap/pull/3990#issuecomment-1628628414 | 1 | fixes | Dynmap does not disable properly if using wrong platform | **Issue Description:** *Dynmap does not disable properly, when on the wrong version of bukkit forks*
* **Dynmap Version:** *3.0<version<3.6*
* **Server Version:** *1.20.1*
* **Steps to Replicate:** *Run dynmap 3.5 or lower on an unsupported platform, and have another plugin with dynmap as a dependency.*
Sorry for ruining your template. Doing this in DynmapPlugin#onEnable should work, right? (From what I noticed, this is really an issue in 3.6 as well):
```java
if (helper == null) {
Log.info("Dynmap is disabled (unsupported platform)");
this.setEnabled(false); // Added this line
return;
}
```
[x] *I have looked at all other issues and this is not a duplicate*
[x] *I have been able to replicate this*
| 87d8c7394151122bf77163c740d27f270213d62b | 2503dbfdbb1fe3cb0f7acd4c2f4ca1ccd6349eb6 | https://github.com/webbukkit/dynmap/compare/87d8c7394151122bf77163c740d27f270213d62b...2503dbfdbb1fe3cb0f7acd4c2f4ca1ccd6349eb6 | diff --git a/spigot/src/main/java/org/dynmap/bukkit/DynmapPlugin.java b/spigot/src/main/java/org/dynmap/bukkit/DynmapPlugin.java
index b2ad4e66..d54154fc 100644
--- a/spigot/src/main/java/org/dynmap/bukkit/DynmapPlugin.java
+++ b/spigot/src/main/java/org/dynmap/bukkit/DynmapPlugin.java
@@ -914,6 +914,7 @@ public class DynmapPlugin extends JavaPlugin implements DynmapAPI {
}
if (helper == null) {
Log.info("Dynmap is disabled (unsupported platform)");
+ this.setEnabled(false);
return;
}
PluginDescriptionFile pdfFile = this.getDescription(); | ['spigot/src/main/java/org/dynmap/bukkit/DynmapPlugin.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 4,989,818 | 1,134,908 | 141,482 | 805 | 36 | 5 | 1 | 1 | 777 | 110 | 195 | 18 | 0 | 1 | 2023-07-10T09:49:22 | 1,882 | Java | {'Java': 4992284, 'JavaScript': 528125, 'PHP': 56381, 'CSS': 34930, 'HTML': 7454, 'ASP.NET': 1292} | Apache License 2.0 |
66 | webbukkit/dynmap/3820/3819 | webbukkit | dynmap | https://github.com/webbukkit/dynmap/issues/3819 | https://github.com/webbukkit/dynmap/pull/3820 | https://github.com/webbukkit/dynmap/pull/3820 | 1 | fix | IllegalStateException: Asynchronous preparation of chunk data for async save! | **Issue Description:** *Dynmap errors on shutdown if it is actively rendering / saving tiles.*
* **Dynmap Version:** *dynmap version 3.4-819*
* **Server Version:** *git-Purpur-1752 (Paper 1.19.2)*
* **Pastebin of Configuration.txt:** *https://paste.gg/p/anonymous/84efb69a60124fe8b884a427bd745959/files/d92f9ce23d4d44c9adac79751491cc78/raw*
* **Server Host (if applicable):** *Selfhosted*
* **Pastebin of crashlogs or other relevant logs:** *https://paste.gg/p/anonymous/589ea0998bd846d7ad1017f5182c0a33/files/7cbd3245509949f3914ee68f68332974/raw*
* **Steps to Replicate:**
- Generate new chunks and have dynmap queue up a backlog of tiles to render.
- Shut down server to restart it.
- See errors during shutdown.
```
[02:56:02] [Dynmap Render Thread/ERROR]: Thread Dynmap Render Thread failed main thread check: preparation of chunk data for async save
java.lang.Throwable: null
at org.spigotmc.AsyncCatcher.catchOp(AsyncCatcher.java:15) ~[purpur-1.19.2.jar:git-Purpur-1752]
at net.minecraft.world.level.chunk.storage.ChunkSerializer.getAsyncSaveData(ChunkSerializer.java:465) ~[?:?]
at jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:104) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:577) ~[?:?]
at org.dynmap.bukkit.helper.v119.AsyncChunkProvider119.lambda$getLoadedChunk$6(AsyncChunkProvider119.java:96) ~[Dynmap-HEAD-spigot.jar:?]
at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1768) ~[?:?]
at net.minecraft.util.thread.BlockableEventLoop.execute(BlockableEventLoop.java:100) ~[?:?]
at java.util.concurrent.CompletableFuture.asyncSupplyStage(CompletableFuture.java:1782) ~[?:?]
at java.util.concurrent.CompletableFuture.supplyAsync(CompletableFuture.java:2005) ~[?:?]
at org.dynmap.bukkit.helper.v119.AsyncChunkProvider119.getLoadedChunk(AsyncChunkProvider119.java:94) ~[Dynmap-HEAD-spigot.jar:?]
at org.dynmap.bukkit.helper.v119.MapChunkCache119.getLoadedChunkAsync(MapChunkCache119.java:40) ~[Dynmap-HEAD-spigot.jar:?]
at org.dynmap.common.chunk.GenericMapChunkCache.getLoadedChunksAsync(GenericMapChunkCache.java:803) ~[Dynmap-HEAD-spigot.jar:?]
at org.dynmap.common.chunk.GenericMapChunkCache.loadChunksAsync(GenericMapChunkCache.java:844) ~[Dynmap-HEAD-spigot.jar:?]
at org.dynmap.bukkit.DynmapPlugin$BukkitServer.createMapChunkCache(DynmapPlugin.java:570) ~[Dynmap-HEAD-spigot.jar:?]
at org.dynmap.MapManager$FullWorldRenderState.processTile(MapManager.java:767) ~[Dynmap-HEAD-spigot.jar:?]
at org.dynmap.MapManager$FullWorldRenderState.run(MapManager.java:726) ~[Dynmap-HEAD-spigot.jar:?]
at org.dynmap.MapManager$DynmapScheduledThreadPoolExecutor$1.run(MapManager.java:234) ~[Dynmap-HEAD-spigot.jar:?]
at org.dynmap.MapManager$DynmapScheduledThreadPoolExecutor$2.run(MapManager.java:252) ~[Dynmap-HEAD-spigot.jar:?]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:539) ~[?:?]
at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304) ~[?:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) ~[?:?]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) ~[?:?]
at java.lang.Thread.run(Thread.java:833) ~[?:?]
[02:56:02] [Dynmap Render Thread/ERROR]: [dynmap] Exception during render job: world=world, map=null
[02:56:02] [Dynmap Render Thread/WARN]: java.lang.RuntimeException: java.util.concurrent.ExecutionException: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.bukkit.helper.v119.AsyncChunkProvider119.lambda$getLoadedChunk$7(AsyncChunkProvider119.java:114)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.bukkit.helper.v119.MapChunkCache119.lambda$getLoadedChunkAsync$0(MapChunkCache119.java:42)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.common.chunk.GenericMapChunkCache.lambda$getLoadedChunksAsync$1(GenericMapChunkCache.java:829)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.common.chunk.GenericMapChunkCache.getLoadedChunksAsync(GenericMapChunkCache.java:827)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.common.chunk.GenericMapChunkCache.loadChunksAsync(GenericMapChunkCache.java:844)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.bukkit.DynmapPlugin$BukkitServer.createMapChunkCache(DynmapPlugin.java:570)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.MapManager$FullWorldRenderState.processTile(MapManager.java:767)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.MapManager$FullWorldRenderState.run(MapManager.java:726)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.MapManager$DynmapScheduledThreadPoolExecutor$1.run(MapManager.java:234)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.MapManager$DynmapScheduledThreadPoolExecutor$2.run(MapManager.java:252)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:539)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.lang.Thread.run(Thread.java:833)
[02:56:02] [Dynmap Render Thread/WARN]: Caused by: java.util.concurrent.ExecutionException: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:396)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.CompletableFuture.get(CompletableFuture.java:2073)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.bukkit.helper.v119.AsyncChunkProvider119.lambda$getLoadedChunk$7(AsyncChunkProvider119.java:112)
[02:56:02] [Dynmap Render Thread/WARN]: ... 16 more
[02:56:02] [Dynmap Render Thread/WARN]: Caused by: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.bukkit.helper.v119.AsyncChunkProvider119.lambda$getLoadedChunk$6(AsyncChunkProvider119.java:98)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1768)
[02:56:02] [Dynmap Render Thread/WARN]: at net.minecraft.util.thread.IAsyncTaskHandler.execute(IAsyncTaskHandler.java:100)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.CompletableFuture.asyncSupplyStage(CompletableFuture.java:1782)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.util.concurrent.CompletableFuture.supplyAsync(CompletableFuture.java:2005)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.bukkit.helper.v119.AsyncChunkProvider119.getLoadedChunk(AsyncChunkProvider119.java:94)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.bukkit.helper.v119.MapChunkCache119.getLoadedChunkAsync(MapChunkCache119.java:40)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.common.chunk.GenericMapChunkCache.getLoadedChunksAsync(GenericMapChunkCache.java:803)
[02:56:02] [Dynmap Render Thread/WARN]: ... 12 more
[02:56:02] [Dynmap Render Thread/WARN]: Caused by: java.lang.reflect.InvocationTargetException
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:119)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/java.lang.reflect.Method.invoke(Method.java:577)
[02:56:02] [Dynmap Render Thread/WARN]: at Dynmap-HEAD-spigot.jar//org.dynmap.bukkit.helper.v119.AsyncChunkProvider119.lambda$getLoadedChunk$6(AsyncChunkProvider119.java:96)
[02:56:02] [Dynmap Render Thread/WARN]: ... 19 more
[02:56:02] [Dynmap Render Thread/WARN]: Caused by: java.lang.IllegalStateException: Asynchronous preparation of chunk data for async save!
[02:56:02] [Dynmap Render Thread/WARN]: at org.spigotmc.AsyncCatcher.catchOp(AsyncCatcher.java:16)
[02:56:02] [Dynmap Render Thread/WARN]: at net.minecraft.world.level.chunk.storage.ChunkRegionLoader.getAsyncSaveData(ChunkRegionLoader.java:465)
[02:56:02] [Dynmap Render Thread/WARN]: at java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:104)
[02:56:02] [Dynmap Render Thread/WARN]: ... 21 more
```
- [x] *I have looked at all other issues and this is not a duplicate*
- [x] *I have been able to replicate this*
| f89777a0dd1ac9e17f595ef0361a030f53eff92a | 36924b494287bdd2da1b074b676ac5f4161953a2 | https://github.com/webbukkit/dynmap/compare/f89777a0dd1ac9e17f595ef0361a030f53eff92a...36924b494287bdd2da1b074b676ac5f4161953a2 | diff --git a/bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/AsyncChunkProvider118_2.java b/bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/AsyncChunkProvider118_2.java
index 745527e6..03e831f7 100644
--- a/bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/AsyncChunkProvider118_2.java
+++ b/bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/AsyncChunkProvider118_2.java
@@ -93,6 +93,9 @@ public class AsyncChunkProvider118_2 {
}
//prepare data synchronously
CompletableFuture<?> future = CompletableFuture.supplyAsync(() -> {
+ //Null will mean that we save with spigot methods, which may be risky on async
+ //Since we're not in main thread, it now refuses new tasks because of shutdown, the risk is lower
+ if (!Bukkit.isPrimaryThread()) return null;
try {
return getAsyncSaveData.invoke(null, world.getHandle(), c);
} catch (IllegalAccessException | InvocationTargetException e) {
@@ -103,15 +106,21 @@ public class AsyncChunkProvider118_2 {
if (++currChunks > MapManager.mapman.getMaxChunkLoadsPerTick()) {
try {
Thread.sleep(25); //hold the lock so other threads also won't stress main thread
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
+ } catch (InterruptedException ignored) {}
}
//save data asynchronously
return () -> {
+ Object o = null;
try {
- return (NBTTagCompound) save.invoke(null, world.getHandle(), c, future.get());
- } catch (ReflectiveOperationException | ExecutionException | InterruptedException e) {
+ o = future.get();
+ return (NBTTagCompound) save.invoke(null, world.getHandle(), c, o);
+ } catch (InterruptedException e) {
+ return null;
+ } catch (InvocationTargetException e) {
+ //We tried to use simple spigot methods at shutdown and failed, hopes for reading from disk
+ if (o == null) return null;
+ throw new RuntimeException(e);
+ } catch (ReflectiveOperationException | ExecutionException e) {
throw new RuntimeException(e);
}
};
diff --git a/bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/MapChunkCache118_2.java b/bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/MapChunkCache118_2.java
index c96b6670..21b94ea3 100644
--- a/bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/MapChunkCache118_2.java
+++ b/bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/MapChunkCache118_2.java
@@ -22,7 +22,7 @@ import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.ExecutionException;
import java.util.function.Supplier;
/**
@@ -61,7 +61,14 @@ public class MapChunkCache118_2 extends GenericMapChunkCache {
try {
CompletableFuture<NBTTagCompound> nbt = provider.getChunk(((CraftWorld) w).getHandle(), chunk.x, chunk.z);
return () -> {
- NBTTagCompound compound = nbt.join();
+ NBTTagCompound compound;
+ try {
+ compound = nbt.get();
+ } catch (InterruptedException e) {
+ return null;
+ } catch (ExecutionException e) {
+ throw new RuntimeException(e);
+ }
return compound == null ? null : parseChunkFromNBT(new NBT.NBTCompound(compound));
};
} catch (InvocationTargetException | IllegalAccessException ignored) {
diff --git a/bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/AsyncChunkProvider119.java b/bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/AsyncChunkProvider119.java
index ac93ee97..eef534b0 100644
--- a/bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/AsyncChunkProvider119.java
+++ b/bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/AsyncChunkProvider119.java
@@ -92,6 +92,9 @@ public class AsyncChunkProvider119 {
}
//prepare data synchronously
CompletableFuture<?> future = CompletableFuture.supplyAsync(() -> {
+ //Null will mean that we save with spigot methods, which may be risky on async
+ //Since we're not in main thread, it now refuses new tasks because of shutdown, the risk is lower
+ if (!Bukkit.isPrimaryThread()) return null;
try {
return getAsyncSaveData.invoke(null, world.getHandle(), c);
} catch (ReflectiveOperationException e) {
@@ -102,15 +105,21 @@ public class AsyncChunkProvider119 {
if (++currChunks > MapManager.mapman.getMaxChunkLoadsPerTick()) {
try {
Thread.sleep(25); //hold the lock so other threads also won't stress main thread
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
+ } catch (InterruptedException ignored) {}
}
//save data asynchronously
return () -> {
+ Object o = null;
try {
- return (NBTTagCompound) save.invoke(null, world.getHandle(), c, future.get());
- } catch (ReflectiveOperationException | ExecutionException | InterruptedException e) {
+ o = future.get();
+ return (NBTTagCompound) save.invoke(null, world.getHandle(), c, o);
+ } catch (InterruptedException e) {
+ return null;
+ } catch (InvocationTargetException e) {
+ //We tried to use simple spigot methods at shutdown and failed, hopes for reading from disk
+ if (o == null) return null;
+ throw new RuntimeException(e);
+ } catch (ReflectiveOperationException | ExecutionException e) {
throw new RuntimeException(e);
}
};
diff --git a/bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/MapChunkCache119.java b/bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/MapChunkCache119.java
index a94c0898..5ecbc277 100644
--- a/bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/MapChunkCache119.java
+++ b/bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/MapChunkCache119.java
@@ -19,6 +19,7 @@ import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
import java.util.function.Supplier;
/**
@@ -58,7 +59,14 @@ public class MapChunkCache119 extends GenericMapChunkCache {
try {
CompletableFuture<NBTTagCompound> nbt = provider.getChunk(((CraftWorld) w).getHandle(), chunk.x, chunk.z);
return () -> {
- NBTTagCompound compound = nbt.join();
+ NBTTagCompound compound;
+ try {
+ compound = nbt.get();
+ } catch (InterruptedException e) {
+ return null;
+ } catch (ExecutionException e) {
+ throw new RuntimeException(e);
+ }
return compound == null ? null : parseChunkFromNBT(new NBT.NBTCompound(compound));
};
} catch (InvocationTargetException | IllegalAccessException ignored) { | ['bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/AsyncChunkProvider119.java', 'bukkit-helper-119/src/main/java/org/dynmap/bukkit/helper/v119/MapChunkCache119.java', 'bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/AsyncChunkProvider118_2.java', 'bukkit-helper-118-2/src/main/java/org/dynmap/bukkit/helper/v118_2/MapChunkCache118_2.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 4,555,274 | 1,037,221 | 128,728 | 709 | 3,249 | 562 | 59 | 4 | 9,467 | 486 | 2,690 | 89 | 2 | 1 | 2022-08-08T08:03:51 | 1,882 | Java | {'Java': 4992284, 'JavaScript': 528125, 'PHP': 56381, 'CSS': 34930, 'HTML': 7454, 'ASP.NET': 1292} | Apache License 2.0 |
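The core of the fix in the record above is a primary-thread check before the chunk data is prepared for an async save, plus tolerating a null result and an interrupted wait later, so that shutdown falls back to the ordinary save/read path instead of crashing. A trimmed-down sketch of that flow, with the reflective NMS call replaced by a hypothetical `prepareAsyncSaveData()`:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;

import org.bukkit.Bukkit;

class ChunkSnapshotSketch {

    // Hypothetical stand-in for ChunkSerializer.getAsyncSaveData(...), which the
    // server only permits on the primary thread.
    Object prepareAsyncSaveData() {
        return new Object();
    }

    CompletableFuture<Object> schedule(Executor mainThreadExecutor) {
        return CompletableFuture.supplyAsync(() -> {
            // During shutdown the main-thread executor may run the task on another
            // thread; return null to signal "fall back to the ordinary save path".
            if (!Bukkit.isPrimaryThread()) {
                return null;
            }
            return prepareAsyncSaveData();
        }, mainThreadExecutor);
    }

    Object awaitQuietly(CompletableFuture<Object> future) {
        try {
            return future.get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return null; // give up quietly instead of wrapping the interrupt in a RuntimeException
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        }
    }
}
```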
67 | webbukkit/dynmap/3766/3765 | webbukkit | dynmap | https://github.com/webbukkit/dynmap/issues/3765 | https://github.com/webbukkit/dynmap/pull/3766 | https://github.com/webbukkit/dynmap/pull/3766 | 1 | close | Schema version set wrong PostgreSQL | **Issue Description:** When using PostgreSQL as map storage the schema version gets misaligned which breaks markers
* **Dynmap Version:** version 3.4-beta-3 (core version 3.4-beta-3-774)
* **Server Version:** fabric 1.18.2 (0.51.1+1.18.2) PostgreSQL 14.2
* **Pastebin of Configuration.txt:** [click](https://gist.github.com/TheRijn/2d037d8e3e155c656cf2e2f83a8dce8a#file-config-yml)
* **Server Host (if applicable):** Selfhosted via Docker Compose
* **Pastebin of crashlogs or other relevant logs:** [click (part of the same Gist as above)](https://gist.github.com/TheRijn/2d037d8e3e155c656cf2e2f83a8dce8a#file-logs)
* **Steps to Replicate:**
I had problems with markers sometime after migrating to PostgreSQL, so I started investigating by replacing my config.txt with a new one with a new database. This worked fine, I saw in the logs that it had problems with migrating so I paid particular attention to the schema version on the fresh start of Dynmap. It started with version 4. Everything was working again.
After a server restart, I saw the same issues with markers again. And in the logs, I saw `[Dynmap] Updating database schema from version = 3` again. But I was sure it already was on version 4.
Logging into the database I saw the `schemaversion` was indeed set to 3; I changed it manually to 4 and everything worked again!
- [x] *I have looked at all other issues and this is not a duplicate*
- [x] *I have been able to replicate this* | 9c80489ec6ead0ef71a2635234bf48f9d891984d | 536b96a5f946af9ff26ef7c85aa45d0a0c59b3f3 | https://github.com/webbukkit/dynmap/compare/9c80489ec6ead0ef71a2635234bf48f9d891984d...536b96a5f946af9ff26ef7c85aa45d0a0c59b3f3 | diff --git a/DynmapCore/src/main/java/org/dynmap/storage/postgresql/PostgreSQLMapStorage.java b/DynmapCore/src/main/java/org/dynmap/storage/postgresql/PostgreSQLMapStorage.java
index 361e445a..533bd1e9 100644
--- a/DynmapCore/src/main/java/org/dynmap/storage/postgresql/PostgreSQLMapStorage.java
+++ b/DynmapCore/src/main/java/org/dynmap/storage/postgresql/PostgreSQLMapStorage.java
@@ -468,7 +468,7 @@ public class PostgreSQLMapStorage extends MapStorage {
doUpdate(c, "CREATE TABLE " + tableStandaloneFiles + " (FileName VARCHAR(128) NOT NULL, ServerID BIGINT NOT NULL DEFAULT 0, Content BYTEA, PRIMARY KEY (FileName, ServerID))");
doUpdate(c, "CREATE INDEX " + tableMaps + "_idx ON " + tableMaps + "(WorldID, MapID, Variant, ServerID)");
doUpdate(c, "CREATE TABLE " + tableSchemaVersion + " (level INT PRIMARY KEY NOT NULL)");
- doUpdate(c, "INSERT INTO " + tableSchemaVersion + " (level) VALUES (3)");
+ doUpdate(c, "INSERT INTO " + tableSchemaVersion + " (level) VALUES (4)");
version = 4; // initialzed to current schema
} catch (SQLException x) {
logSQLException("Error creating tables", x); | ['DynmapCore/src/main/java/org/dynmap/storage/postgresql/PostgreSQLMapStorage.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 4,092,613 | 936,437 | 114,934 | 609 | 181 | 44 | 2 | 1 | 1,472 | 214 | 401 | 18 | 2 | 0 | 2022-05-30T19:28:22 | 1,882 | Java | {'Java': 4992284, 'JavaScript': 528125, 'PHP': 56381, 'CSS': 34930, 'HTML': 7454, 'ASP.NET': 1292} | Apache License 2.0 |
96 | uwetrottmann/seriesguide/721/719 | uwetrottmann | seriesguide | https://github.com/UweTrottmann/SeriesGuide/issues/719 | https://github.com/UweTrottmann/SeriesGuide/pull/721 | https://github.com/UweTrottmann/SeriesGuide/pull/721 | 1 | closes | Writing to Storage Access Framework file does not replace existing data | `JsonExportTask` does not clear existing file contents before writing new data. If the written JSON is shorter than the existing file contents invalid JSON ends up in the file. | 044b583d23bd6178096a3a6fddbf3471cea72bfe | d0098d9b853d63ab0380e63669342bf5e23a20ae | https://github.com/uwetrottmann/seriesguide/compare/044b583d23bd6178096a3a6fddbf3471cea72bfe...d0098d9b853d63ab0380e63669342bf5e23a20ae | diff --git a/app/src/main/java/com/battlelancer/seriesguide/dataliberation/JsonExportTask.java b/app/src/main/java/com/battlelancer/seriesguide/dataliberation/JsonExportTask.java
index 786ea045f..f53d8ba49 100644
--- a/app/src/main/java/com/battlelancer/seriesguide/dataliberation/JsonExportTask.java
+++ b/app/src/main/java/com/battlelancer/seriesguide/dataliberation/JsonExportTask.java
@@ -258,6 +258,12 @@ public class JsonExportTask extends AsyncTask<Void, Integer, Integer> {
}
FileOutputStream out = new FileOutputStream(pfd.getFileDescriptor());
+ // Even though using streams and FileOutputStream does not append by
+ // default, using Storage Access Framework just overwrites existing
+ // bytes, potentially leaving old bytes hanging over:
+ // so truncate the file first to clear any existing bytes.
+ out.getChannel().truncate(0);
+
if (type == BACKUP_SHOWS) {
writeJsonStreamShows(out, data);
} else if (type == BACKUP_LISTS) { | ['app/src/main/java/com/battlelancer/seriesguide/dataliberation/JsonExportTask.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,842,065 | 371,377 | 50,448 | 313 | 366 | 58 | 6 | 1 | 176 | 29 | 35 | 1 | 0 | 0 | 2020-03-12T08:43:42 | 1,822 | Kotlin | {'Kotlin': 1997116, 'Java': 814063, 'CSS': 22455, 'PowerShell': 660} | Apache License 2.0 |
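For the SeriesGuide export issue above, here is a minimal sketch of the write pattern the patch applies. The `resolver`, `uri`, class and method names are illustrative stand-ins rather than the app's real code; only the truncate-before-write step mirrors the actual fix.

```java
import java.io.FileOutputStream;

import android.content.ContentResolver;
import android.net.Uri;
import android.os.ParcelFileDescriptor;

public class SafOverwrite {
    static void writeBackup(ContentResolver resolver, Uri uri, byte[] json) throws Exception {
        ParcelFileDescriptor pfd = resolver.openFileDescriptor(uri, "w");
        if (pfd == null) {
            return;
        }
        try (FileOutputStream out = new FileOutputStream(pfd.getFileDescriptor())) {
            // Writing to a Storage Access Framework document overwrites bytes in place rather
            // than truncating, so a shorter export would leave stale JSON at the end of the file.
            // Truncate first to drop any leftover bytes from a previous, longer backup.
            out.getChannel().truncate(0);
            out.write(json);
        } finally {
            pfd.close();
        }
    }
}
```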
21 | jdbi/jdbi/1460/1459 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/1459 | https://github.com/jdbi/jdbi/pull/1460 | https://github.com/jdbi/jdbi/pull/1460 | 1 | fix | vavr argument factory should check type rather than `instanceof` | I think this is wrong:
https://github.com/jdbi/jdbi/blob/master/vavr/src/main/java/org/jdbi/v3/vavr/VavrValueArgumentFactory.java#L41
This ends up giving the wrong type for `null` values.
Ref: https://groups.google.com/forum/#!topic/jdbi/piSOY5yB320
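A minimal sketch of why the `instanceof` check misses this case: for a `null` value there is no instance to test, but the declared parameter type still identifies the binding as a vavr `Value`. The `GenericType` and `GenericTypes` helpers used here are the same ones the patch below relies on.

```java
import java.lang.reflect.Type;

import io.vavr.control.Option;
import org.jdbi.v3.core.generic.GenericType;
import org.jdbi.v3.core.generic.GenericTypes;

public class NullOptionBinding {
    public static void main(String[] args) {
        Type declared = new GenericType<Option<Integer>>() {}.getType();
        Object value = null; // a null Option<Integer> passed as a bound argument

        // Old check: false for null, so the vavr factory never claims the argument
        // and the nested Integer type is lost.
        System.out.println(value instanceof Option);             // false

        // Checking the declared type still identifies the parameter as a vavr Value.
        Class<?> raw = GenericTypes.getErasedType(declared);
        System.out.println(Option.class.isAssignableFrom(raw));  // true
    }
}
```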
| bed76121bfc27906e520968b353a63192641a727 | 8ba55119625f6ef118e36d6a55259c7ded89cd15 | https://github.com/jdbi/jdbi/compare/bed76121bfc27906e520968b353a63192641a727...8ba55119625f6ef118e36d6a55259c7ded89cd15 | diff --git a/core/src/main/java/org/jdbi/v3/core/argument/NullArgument.java b/core/src/main/java/org/jdbi/v3/core/argument/NullArgument.java
index 411e13ce4..50b6cd86a 100644
--- a/core/src/main/java/org/jdbi/v3/core/argument/NullArgument.java
+++ b/core/src/main/java/org/jdbi/v3/core/argument/NullArgument.java
@@ -49,6 +49,14 @@ public class NullArgument implements Argument {
}
}
+ /**
+ * @return the SQL type of the null
+ * @see java.sql.Types
+ */
+ public Integer getSqlType() {
+ return sqlType;
+ }
+
@Override
public String toString() {
return "NULL";
diff --git a/vavr/src/main/java/org/jdbi/v3/vavr/VavrValueArgumentFactory.java b/vavr/src/main/java/org/jdbi/v3/vavr/VavrValueArgumentFactory.java
index addc95d4e..ab05234a5 100644
--- a/vavr/src/main/java/org/jdbi/v3/vavr/VavrValueArgumentFactory.java
+++ b/vavr/src/main/java/org/jdbi/v3/vavr/VavrValueArgumentFactory.java
@@ -14,7 +14,10 @@
package org.jdbi.v3.vavr;
import java.lang.reflect.Type;
+import java.util.Arrays;
+import java.util.HashSet;
import java.util.Optional;
+import java.util.Set;
import io.vavr.Lazy;
import io.vavr.Value;
@@ -26,6 +29,7 @@ import org.jdbi.v3.core.argument.Argument;
import org.jdbi.v3.core.argument.ArgumentFactory;
import org.jdbi.v3.core.argument.Arguments;
import org.jdbi.v3.core.config.ConfigRegistry;
+import org.jdbi.v3.core.generic.GenericTypes;
import static org.jdbi.v3.core.generic.GenericTypes.findGenericParameter;
@@ -35,27 +39,26 @@ import static org.jdbi.v3.core.generic.GenericTypes.findGenericParameter;
* if there is no such value (Try-Failed, Either-Left...) a "null" value will be applied as argument value
*/
class VavrValueArgumentFactory implements ArgumentFactory {
+ private static final Set<Class<?>> VALUE_CLASSES = new HashSet<>(Arrays.asList(Option.class, Lazy.class, Try.class, Either.class, Validation.class));
@Override
public Optional<Argument> build(Type type, Object value, ConfigRegistry config) {
- if (value instanceof Option || value instanceof Lazy || value instanceof Try || value instanceof Either || value instanceof Validation) {
+ Class<?> rawType = GenericTypes.getErasedType(type);
+
+ if (VALUE_CLASSES.stream().anyMatch(vc -> vc.isAssignableFrom(rawType))) {
return buildValueArgument(type, config, (Value) value);
}
return Optional.empty();
}
- private Optional<Argument> buildValueArgument(Type type, ConfigRegistry config, Value<?> value) {
+ private static Optional<Argument> buildValueArgument(Type type, ConfigRegistry config, Value<?> value) {
Type nestedType = findGenericParameter(type, Value.class).orElseGet(() -> extractTypeOfValue(value));
- Object nestedValue = value.getOrNull();
- return resolveNestedFromConfigured(config, nestedType, nestedValue);
- }
-
- Optional<Argument> resolveNestedFromConfigured(ConfigRegistry config, Type nestedType, Object nestedValue) {
+ Object nestedValue = value == null ? null : value.getOrNull();
return config.get(Arguments.class).findFor(nestedType, nestedValue);
}
- private Type extractTypeOfValue(Value<?> value) {
+ private static Type extractTypeOfValue(Value<?> value) {
Value<Class<?>> classOfValue = value.map(Object::getClass);
return classOfValue.getOrElse(Object.class);
}
diff --git a/vavr/src/test/java/org/jdbi/v3/vavr/TestVavrValueArgumentFactory.java b/vavr/src/test/java/org/jdbi/v3/vavr/TestVavrValueArgumentFactory.java
index c40292a1b..60062d12e 100644
--- a/vavr/src/test/java/org/jdbi/v3/vavr/TestVavrValueArgumentFactory.java
+++ b/vavr/src/test/java/org/jdbi/v3/vavr/TestVavrValueArgumentFactory.java
@@ -14,67 +14,60 @@
package org.jdbi.v3.vavr;
import java.lang.reflect.Type;
+import java.sql.Types;
import java.util.Optional;
import io.vavr.Lazy;
-import io.vavr.NotImplementedError;
import io.vavr.control.Either;
import io.vavr.control.Option;
import io.vavr.control.Try;
import io.vavr.control.Validation;
import org.jdbi.v3.core.argument.Argument;
+import org.jdbi.v3.core.argument.NullArgument;
import org.jdbi.v3.core.config.ConfigRegistry;
import org.jdbi.v3.core.generic.GenericType;
-import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class TestVavrValueArgumentFactory {
+ private static final Type TRY_INTEGER = new GenericType<Try<Integer>>() {}.getType();
+ private static final Type OPTION_INTEGER = new GenericType<Option<Integer>>() {}.getType();
+ private static final Type LAZY_INTEGER = new GenericType<Lazy<Integer>>() {}.getType();
+ private static final Type LAZY_WILDCARD = new GenericType<Lazy<?>>() {}.getType();
+ private static final Type EITHER_STRING_INTEGER = new GenericType<Either<String, Integer>>() {}.getType();
+ private static final Type EITHER_WILDCARD = new GenericType<Either<?, ?>>() {}.getType();
+ private static final Type VALIDATION_STRING_INT = new GenericType<Validation<String, Integer>>() {}.getType();
- private static final Argument MOCK_ARGUMENT = ((position, statement, ctx) -> new NotImplementedError());
+ private ConfigRegistry configRegistry = new ConfigRegistry();
- private VavrValueArgumentFactory unit;
-
- @Before
- public void setUp() {
- unit = new VavrValueArgumentFactory() {
- @Override
- Optional<Argument> resolveNestedFromConfigured(ConfigRegistry config, Type nestedType, Object nestedValue) {
- return Optional.of(MOCK_ARGUMENT);
- }
- };
- }
+ private VavrValueArgumentFactory unit = new VavrValueArgumentFactory();
@Test
public void testGetNonValueArgumentShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Option<Integer>>() {}.getType(),
- Option.of(1), null);
+ Optional<Argument> arg = unit.build(OPTION_INTEGER, Option.of(1), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetArgumentOfNoneShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Option<Integer>>() {}.getType(),
- Option.none(), null);
+ Optional<Argument> arg = unit.build(OPTION_INTEGER, Option.none(), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetLazyArgumentShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Lazy<Integer>>() {}.getType(),
- Lazy.of(() -> 1), null);
+ Optional<Argument> arg = unit.build(LAZY_INTEGER, Lazy.of(() -> 1), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetLazyArgumentInferredShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Lazy<?>>() {}.getType(),
- Lazy.of(() -> 1), null);
+ Optional<Argument> arg = unit.build(LAZY_WILDCARD, Lazy.of(() -> 1), configRegistry);
assertThat(arg).isNotEmpty();
}
@@ -85,85 +78,75 @@ public class TestVavrValueArgumentFactory {
throw new TestSpecificException();
});
- assertThatThrownBy(() -> unit.build(new GenericType<Lazy<Integer>>() {}.getType(),
- badEvaluatingLazy, null))
+ assertThatThrownBy(() -> unit.build(LAZY_INTEGER, badEvaluatingLazy, configRegistry))
.isInstanceOf(TestSpecificException.class);
}
@Test
public void testGetFailedTryArgumentShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Try<Integer>>() {}.getType(),
- Try.failure(new TestSpecificException()), null);
+ Optional<Argument> arg = unit.build(TRY_INTEGER, Try.failure(new TestSpecificException()), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetSuccessTryArgumentShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Try<Integer>>() {}.getType(),
- Try.failure(new TestSpecificException()), null);
+ Optional<Argument> arg = unit.build(TRY_INTEGER, Try.failure(new TestSpecificException()), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetLeftEitherArgumentShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Either<String, Integer>>() {}.getType(),
- Either.left("error"), null);
+ Optional<Argument> arg = unit.build(EITHER_STRING_INTEGER, Either.left("error"), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetRightEitherArgumentShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Either<String, Integer>>() {}.getType(),
- Either.right(1), null);
+ Optional<Argument> arg = unit.build(EITHER_STRING_INTEGER, Either.right(1), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetRightEitherArgumentInferredShouldNotBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Either<?, ?>>() {}.getType(),
- Either.right(1), null);
+ Optional<Argument> arg = unit.build(EITHER_WILDCARD, Either.right(1), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetValidValidationArgumentShouldNotBeEmpty() {
- Optional<Argument> arg =
- unit.build(new GenericType<Validation<String, Integer>>() {}.getType(),
- Validation.valid(1), null);
+ Optional<Argument> arg = unit.build(VALIDATION_STRING_INT, Validation.valid(1), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
public void testGetInvalidValidationArgumentShouldNotBeEmpty() {
- Optional<Argument> arg =
- unit.build(new GenericType<Validation<String, Integer>>() {}.getType(),
- Validation.invalid("error"), null);
+ Optional<Argument> arg = unit.build(VALIDATION_STRING_INT, Validation.invalid("error"), configRegistry);
assertThat(arg).isNotEmpty();
}
@Test
- public void testGetArgumentNotPartOfFactoryShouldBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Option<Integer>>() {}.getType(),
- 1, null);
+ public void testGetArgumentForNull() {
+ Optional<Argument> arg = unit.build(OPTION_INTEGER, null, configRegistry);
- assertThat(arg).isEmpty();
+ assertThat(((NullArgument) arg.get()).getSqlType())
+ .isEqualTo(Types.INTEGER);
}
@Test
- public void testGetArgumentNotPartOfFactory2ShouldBeEmpty() {
- Optional<Argument> arg = unit.build(new GenericType<Integer>() {}.getType(),
- null, null);
+ public void testGetArgumentNotPartOfFactoryShouldBeEmpty() {
+ Optional<Argument> arg = unit.build(new GenericType<Integer>() {}.getType(), null, configRegistry);
assertThat(arg).isEmpty();
}
- private static class TestSpecificException extends RuntimeException {}
-
+ private static class TestSpecificException extends RuntimeException {
+ private static final long serialVersionUID = 1L;
+ }
} | ['vavr/src/main/java/org/jdbi/v3/vavr/VavrValueArgumentFactory.java', 'vavr/src/test/java/org/jdbi/v3/vavr/TestVavrValueArgumentFactory.java', 'core/src/main/java/org/jdbi/v3/core/argument/NullArgument.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 1,306,763 | 274,969 | 36,242 | 487 | 1,379 | 285 | 27 | 2 | 256 | 18 | 76 | 6 | 2 | 0 | 2019-02-11T07:11:46 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
22 | jdbi/jdbi/1423/1422 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/1422 | https://github.com/jdbi/jdbi/pull/1423 | https://github.com/jdbi/jdbi/pull/1423 | 1 | fix | test: Nondeterminstic `TestTimestampted` test failures | Observed in Travis build https://travis-ci.org/jdbi/jdbi/jobs/481605774
```
[ERROR] Failures:
[ERROR] TestTimestamped.shouldAllowCustomTimestampParameter:88
Expecting:
<2019-01-19T01:49:50.767+02:00>
to be between:
[2019-01-18T23:49:50.766Z, 2019-01-18T23:49:50.767Z]
```
I'm not sure exactly what's going on with this failure. | 9ffec885f85fe10e6ee9def613ac777122148424 | f30baaae11cb2d6f9f1bf7285a4791628319810b | https://github.com/jdbi/jdbi/compare/9ffec885f85fe10e6ee9def613ac777122148424...f30baaae11cb2d6f9f1bf7285a4791628319810b | diff --git a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/customizer/internal/TimestampedFactory.java b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/customizer/internal/TimestampedFactory.java
index b9db6a4b3..cec103c6a 100644
--- a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/customizer/internal/TimestampedFactory.java
+++ b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/customizer/internal/TimestampedFactory.java
@@ -15,22 +15,35 @@ package org.jdbi.v3.sqlobject.customizer.internal;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
+import java.time.Clock;
import java.time.OffsetDateTime;
import java.time.ZoneId;
+import java.util.function.Function;
+import org.jdbi.v3.meta.Beta;
import org.jdbi.v3.sqlobject.customizer.SqlStatementCustomizer;
import org.jdbi.v3.sqlobject.customizer.SqlStatementCustomizerFactory;
import org.jdbi.v3.sqlobject.customizer.Timestamped;
import org.jdbi.v3.sqlobject.customizer.TimestampedConfig;
public class TimestampedFactory implements SqlStatementCustomizerFactory {
+ private static Function<ZoneId, Clock> timeSource = Clock::system;
+
@Override
public SqlStatementCustomizer createForMethod(Annotation annotation, Class<?> sqlObjectType, Method method) {
final String parameterName = ((Timestamped) annotation).value();
return stmt -> {
ZoneId zone = stmt.getConfig(TimestampedConfig.class).getTimezone();
- stmt.bind(parameterName, OffsetDateTime.now(zone));
+ stmt.bind(parameterName, OffsetDateTime.now(timeSource.apply(zone)));
};
}
+
+ /**
+ * for testing purposes only
+ */
+ @Beta
+ static void setTimeSource(Function<ZoneId, Clock> timeSource) {
+ TimestampedFactory.timeSource = timeSource;
+ }
}
diff --git a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/MockClock.java b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/MockClock.java
index 1cf8f0a67..ec8f6ddc1 100644
--- a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/MockClock.java
+++ b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/MockClock.java
@@ -16,27 +16,41 @@ package org.jdbi.v3.sqlobject;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.time.ZonedDateTime;
import java.time.temporal.TemporalUnit;
-class MockClock extends Clock {
- private Instant now = Instant.now();
+public class MockClock extends Clock {
+ private ZonedDateTime now;
+
+ private MockClock(ZonedDateTime now) {
+ this.now = now;
+ }
@Override
public ZoneId getZone() {
- return ZoneId.systemDefault();
+ return now.getZone();
}
@Override
public Clock withZone(ZoneId zone) {
- throw new UnsupportedOperationException();
+ return new MockClock(now.withZoneSameInstant(zone));
}
@Override
public Instant instant() {
- return now;
+ return now.toInstant();
}
public Instant advance(long amountToAdd, TemporalUnit unit) {
- return now = now.plus(amountToAdd, unit);
+ now = now.plus(amountToAdd, unit);
+ return instant();
+ }
+
+ public static MockClock now() {
+ return at(ZonedDateTime.now());
+ }
+
+ public static MockClock at(ZonedDateTime now) {
+ return new MockClock(now);
}
}
diff --git a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestInheritedAnnotations.java b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestInheritedAnnotations.java
index aac47aed4..a49eac0a1 100644
--- a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestInheritedAnnotations.java
+++ b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestInheritedAnnotations.java
@@ -37,7 +37,7 @@ public class TestInheritedAnnotations {
@Rule
public H2DatabaseRule dbRule = new H2DatabaseRule().withPlugin(new SqlObjectPlugin());
- private MockClock mockClock = new MockClock();
+ private MockClock mockClock = MockClock.now();
@Before
public void setUp() {
diff --git a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestTimestamped.java b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/customizer/internal/TestTimestamped.java
similarity index 84%
rename from sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestTimestamped.java
rename to sqlobject/src/test/java/org/jdbi/v3/sqlobject/customizer/internal/TestTimestamped.java
index b6ed8edae..375a9bde5 100644
--- a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestTimestamped.java
+++ b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/customizer/internal/TestTimestamped.java
@@ -11,18 +11,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jdbi.v3.sqlobject;
+package org.jdbi.v3.sqlobject.customizer.internal;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.Month;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
+import java.time.temporal.ChronoUnit;
import org.jdbi.v3.core.Jdbi;
import org.jdbi.v3.core.mapper.RowMapper;
import org.jdbi.v3.core.statement.SqlLogger;
import org.jdbi.v3.core.statement.StatementContext;
+import org.jdbi.v3.sqlobject.MockClock;
+import org.jdbi.v3.sqlobject.SqlObjectPlugin;
import org.jdbi.v3.sqlobject.config.RegisterRowMapper;
import org.jdbi.v3.sqlobject.customizer.Bind;
import org.jdbi.v3.sqlobject.customizer.BindBean;
@@ -42,22 +48,25 @@ import static org.assertj.core.api.Assertions.assertThat;
* Tests for the {@link Timestamped} annotation
*/
public class TestTimestamped {
- private static final ZoneOffset OFFSET = ZoneOffset.ofHours(2);
+ private static final ZoneOffset GMT_PLUS_2 = ZoneOffset.ofHours(2);
+ private static final OffsetDateTime UTC_MOMENT = OffsetDateTime.of(LocalDate.of(2018, Month.JANUARY, 1), LocalTime.NOON, ZoneOffset.UTC);
@Rule
public JdbiRule dbRule = JdbiRule.h2().withPlugin(new SqlObjectPlugin());
-
private PersonDAO personDAO;
- private OffsetDateTime timestamp;
- private Timestamp sqlTimestamp;
- private OffsetDateTime testStart;
+
+ private OffsetDateTime insertedTimestamp;
+ private Timestamp insertedSqlTimestamp;
+
+ private final MockClock clock = MockClock.at(UTC_MOMENT.toZonedDateTime());
@Before
public void before() {
+ TimestampedFactory.setTimeSource(clock::withZone);
+ dbRule.getJdbi().getConfig(TimestampedConfig.class).setTimezone(GMT_PLUS_2);
+
personDAO = dbRule.getJdbi().onDemand(PersonDAO.class);
personDAO.createTable();
- dbRule.getJdbi().getConfig(TimestampedConfig.class).setTimezone(OFFSET);
- testStart = OffsetDateTime.now();
}
@Test
@@ -67,14 +76,14 @@ public class TestTimestamped {
recordNextTimestamp("now");
personDAO.insert(input);
- assertThat(timestamp.getOffset()).isEqualTo(OFFSET);
- assertThat(timestamp).isBetween(testStart, OffsetDateTime.now());
+ assertThat(insertedTimestamp.getOffset()).isEqualTo(GMT_PLUS_2);
+ assertThat(insertedTimestamp.toInstant()).isEqualTo(UTC_MOMENT.toInstant());
Person result = personDAO.get(1);
assertThat(result.getCreated())
.isEqualTo(result.getModified())
- .isEqualTo(sqlTimestamp);
+ .isEqualTo(insertedSqlTimestamp);
}
@Test
@@ -84,8 +93,8 @@ public class TestTimestamped {
recordNextTimestamp("createdAt");
personDAO.insertWithCustomTimestampFields(input);
- assertThat(timestamp.getOffset()).isEqualTo(OFFSET);
- assertThat(timestamp).isBetween(testStart, OffsetDateTime.now());
+ assertThat(insertedTimestamp.getOffset()).isEqualTo(GMT_PLUS_2);
+ assertThat(insertedTimestamp.toInstant()).isEqualTo(UTC_MOMENT.toInstant());
Person result = personDAO.get(1);
@@ -93,7 +102,7 @@ public class TestTimestamped {
assertThat(result.getLastName()).isEqualTo(input.getLastName());
assertThat(result.getCreated())
.isEqualTo(result.getModified())
- .isEqualTo(sqlTimestamp);
+ .isEqualTo(insertedSqlTimestamp);
}
@Test
@@ -103,17 +112,19 @@ public class TestTimestamped {
recordNextTimestamp("now");
personDAO.insert(input);
- Timestamp insert = sqlTimestamp;
+ Timestamp insert = insertedSqlTimestamp;
Person fetched = personDAO.get(3);
fetched.setLastName("Banda");
+ clock.advance(1, ChronoUnit.SECONDS);
recordNextTimestamp("now");
personDAO.updatePerson(fetched);
- Timestamp update = sqlTimestamp;
+ Timestamp update = insertedSqlTimestamp;
Person result = personDAO.get(3);
+ assertThat(insert).isNotEqualTo(update);
assertThat(result.getLastName()).isEqualToIgnoringCase("Banda");
assertThat(result.getCreated()).isEqualTo(insert);
assertThat(result.getModified()).isEqualTo(update);
@@ -151,8 +162,8 @@ public class TestTimestamped {
.findForName(name, ctx)
.orElseThrow(AssertionError::new)
.toString();
- timestamp = OffsetDateTime.parse(toString);
- sqlTimestamp = Timestamp.from(timestamp.toInstant());
+ insertedTimestamp = OffsetDateTime.parse(toString);
+ insertedSqlTimestamp = Timestamp.from(insertedTimestamp.toInstant());
}
@Override | ['sqlobject/src/test/java/org/jdbi/v3/sqlobject/MockClock.java', 'sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestTimestamped.java', 'sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestInheritedAnnotations.java', 'sqlobject/src/main/java/org/jdbi/v3/sqlobject/customizer/internal/TimestampedFactory.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 1,260,699 | 264,747 | 34,935 | 471 | 508 | 109 | 15 | 1 | 346 | 28 | 124 | 12 | 1 | 1 | 2019-01-19T00:27:39 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
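The patch above removes the race by making the timestamp source injectable. A standalone sketch of the same idea with a fixed `java.time.Clock` follows; the class and method names are illustrative, not jdbi API.

```java
import java.time.Clock;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;

public class FixedClockTimestamps {
    // Production code asks a Clock for "now" instead of calling OffsetDateTime.now() directly.
    static OffsetDateTime stamp(Clock clock) {
        return OffsetDateTime.now(clock);
    }

    public static void main(String[] args) {
        // A pinned clock lets the test assert an exact instant instead of the fragile
        // "between test start and now" window that produced the Travis failure above.
        Clock fixed = Clock.fixed(Instant.parse("2018-01-01T12:00:00Z"), ZoneOffset.ofHours(2));
        OffsetDateTime stamped = stamp(fixed);
        System.out.println(stamped);             // 2018-01-01T14:00+02:00
        System.out.println(stamped.toInstant()); // 2018-01-01T12:00:00Z
    }
}
```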
23 | jdbi/jdbi/1339/1338 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/1338 | https://github.com/jdbi/jdbi/pull/1339 | https://github.com/jdbi/jdbi/pull/1339 | 2 | fixes | TransactionIsolationLevel.UNKNOWN cause unexpected failure | Hello @jdbi Team!
I was implementing some nifty transaction support which uses JDBI's `Transaction` annotation, and got an exception I had not expected.
Sample code:
```java
@Rule
public JdbiMySqlRule db = new JdbiMySqlRule();
interface SampleDao {
@SqlQuery("select count(*) from sample")
int select();
}
static class Bubu {
@Transaction
public void doSomething() {
}
}
@Test
public void test_tx() throws NoSuchMethodException, SecurityException {
final Method method = Bubu.class.getDeclaredMethod("doSomething");
final Transaction tx = method.getAnnotation(Transaction.class);
db.getHandle().inTransaction(tx.value(), c -> {
return null; // do nothing
});
}
```
Stack trace:
```plain
org.jdbi.v3.core.transaction.UnableToManipulateTransactionIsolationLevelException: Unable to set isolation level to -2147483648
at org.jdbi.v3.core.Handle.setTransactionIsolation(Handle.java:477)
at org.jdbi.v3.core.Handle.setTransactionIsolation(Handle.java:461)
at org.jdbi.v3.core.Handle.inTransaction(Handle.java:430)
at ***
Caused by: org.h2.jdbc.JdbcSQLException: Invalid value "-2147483648" for parameter "level" [90008-197]
at org.h2.message.DbException.getJdbcSQLException(DbException.java:357)
at org.h2.message.DbException.get(DbException.java:179)
at org.h2.message.DbException.getInvalidValueException(DbException.java:240)
at org.h2.jdbc.JdbcConnection.setTransactionIsolation(JdbcConnection.java:744)
at org.jdbi.v3.core.Handle.setTransactionIsolation(Handle.java:475)
... 32 more
```
I understand why this is happening, but from an API point of view this is rather unexpected, since `TransactionIsolationLevel.UNKNOWN`, which is the default `Transaction` value, is a valid enum value. As an API user I would expect not to get any errors regardless of which `TransactionIsolationLevel` is used.
There is this method in `Handle` class: https://github.com/jdbi/jdbi/blob/master/core/src/main/java/org/jdbi/v3/core/Handle.java#L472-L482
```java
/**
* Set the transaction isolation level on the underlying connection.
*
* @param level the isolation level to use
*/
public void setTransactionIsolation(int level) {
try {
if (connection.getTransactionIsolation() == level) {
// already set, noop
return;
}
connection.setTransactionIsolation(level);
} catch (SQLException e) {
throw new UnableToManipulateTransactionIsolationLevelException(level, e);
}
}
```
Which I think should be changed to:
```java
/**
* Set the transaction isolation level on the underlying connection.
*
* @param level the isolation level to use
*/
public void setTransactionIsolation(int level) {
try {
boolean isAlreadySet = level == connection.getTransactionIsolation();
boolean isUnknown = level == TransactionIsolationLevel.UNKNOWN;
if (isAlreadySet || isUnknown) {
// already set or unknown, noop
return;
}
connection.setTransactionIsolation(level);
} catch (SQLException e) {
throw new UnableToManipulateTransactionIsolationLevelException(level, e);
}
}
```
So that `TransactionIsolationLevel.UNKNOWN` is handled as a special case.
I see this already done in [`TransactionDecorator`](https://github.com/jdbi/jdbi/blob/a831d3314db43859c9894aa987d3ee4827edc459/sqlobject/src/main/java/org/jdbi/v3/sqlobject/transaction/internal/TransactionDecorator.java#L58-L62) and I'm wondering why this is done at that level of abstraction and not lower down, in the `Handle` class. Is there something I am missing? Something which requires `Handle` to fail when the transaction isolation level is unknown?
Another option is to modify the `inTransaction()` method to detect this case, but that would rather mask the problem than fix it. IMHO `TransactionIsolationLevel.UNKNOWN` should be handled at the lowest layer, before it is propagated to the `Connection`, but I may be wrong since I'm not as familiar with JDBI internals as you are.
This concludes my issue. | 981eea33dc4b0fdb8640144e599680fe77e4fe8c | 4fe1e506618993f0dc78c2cda1505d41de147d34 | https://github.com/jdbi/jdbi/compare/981eea33dc4b0fdb8640144e599680fe77e4fe8c...4fe1e506618993f0dc78c2cda1505d41de147d34 | diff --git a/core/src/main/java/org/jdbi/v3/core/Handle.java b/core/src/main/java/org/jdbi/v3/core/Handle.java
index 26701def2..755644870 100644
--- a/core/src/main/java/org/jdbi/v3/core/Handle.java
+++ b/core/src/main/java/org/jdbi/v3/core/Handle.java
@@ -458,10 +458,14 @@ public class Handle implements Closeable, Configurable<Handle> {
/**
* Set the transaction isolation level on the underlying connection.
*
+ * @throws UnableToManipulateTransactionIsolationLevelException if isolation level is not supported by the underlying connection or JDBC driver
+ *
* @param level the isolation level to use
*/
public void setTransactionIsolation(TransactionIsolationLevel level) {
- setTransactionIsolation(level.intValue());
+ if (level != TransactionIsolationLevel.UNKNOWN) {
+ setTransactionIsolation(level.intValue());
+ }
}
/**
@@ -471,11 +475,9 @@ public class Handle implements Closeable, Configurable<Handle> {
*/
public void setTransactionIsolation(int level) {
try {
- if (connection.getTransactionIsolation() == level) {
- // already set, noop
- return;
+ if (connection.getTransactionIsolation() != level) {
+ connection.setTransactionIsolation(level);
}
- connection.setTransactionIsolation(level);
} catch (SQLException e) {
throw new UnableToManipulateTransactionIsolationLevelException(level, e);
}
diff --git a/core/src/main/java/org/jdbi/v3/core/Jdbi.java b/core/src/main/java/org/jdbi/v3/core/Jdbi.java
index 7b99632f9..a5bbc6539 100644
--- a/core/src/main/java/org/jdbi/v3/core/Jdbi.java
+++ b/core/src/main/java/org/jdbi/v3/core/Jdbi.java
@@ -399,7 +399,7 @@ public class Jdbi implements Configurable<Jdbi> {
* @throws X any exception thrown by the callback
*/
public <R, X extends Exception> R inTransaction(final TransactionIsolationLevel level, final HandleCallback<R, X> callback) throws X {
- return withHandle(handle -> handle.<R, X>inTransaction(level, callback));
+ return withHandle(handle -> handle.inTransaction(level, callback));
}
/**
diff --git a/core/src/test/java/org/jdbi/v3/core/TestHandle.java b/core/src/test/java/org/jdbi/v3/core/TestHandle.java
index 80d0317e6..711471c2e 100644
--- a/core/src/test/java/org/jdbi/v3/core/TestHandle.java
+++ b/core/src/test/java/org/jdbi/v3/core/TestHandle.java
@@ -14,10 +14,14 @@
package org.jdbi.v3.core;
import org.jdbi.v3.core.rule.H2DatabaseRule;
+import org.jdbi.v3.core.transaction.TransactionIsolationLevel;
+import org.jdbi.v3.core.transaction.UnableToManipulateTransactionIsolationLevelException;
import org.junit.Rule;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatCode;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class TestHandle {
@Rule
@@ -62,4 +66,15 @@ public class TestHandle {
final Handle h = dbRule.getSharedHandle();
h.execute("CREATE TABLE \\"\\u2603\\" (pk int primary key)");
}
+
+ @Test
+ public void unknownTransactionLevelIsOk() {
+ Handle h = dbRule.openHandle();
+
+ assertThatThrownBy(() -> h.setTransactionIsolation(Integer.MIN_VALUE))
+ .isInstanceOf(UnableToManipulateTransactionIsolationLevelException.class);
+
+ assertThatCode(() -> h.setTransactionIsolation(TransactionIsolationLevel.UNKNOWN))
+ .doesNotThrowAnyException();
+ }
}
diff --git a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/transaction/internal/TransactionDecorator.java b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/transaction/internal/TransactionDecorator.java
index b7c6ce9d9..ca7c6f663 100644
--- a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/transaction/internal/TransactionDecorator.java
+++ b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/transaction/internal/TransactionDecorator.java
@@ -55,11 +55,7 @@ public class TransactionDecorator implements HandlerDecorator {
}
try {
- if (isolation == TransactionIsolationLevel.UNKNOWN) {
- return h.inTransaction(callback);
- } else {
- return h.inTransaction(isolation, callback);
- }
+ return h.inTransaction(isolation, callback);
} finally {
if (flipReadOnly) {
h.setReadOnly(!readOnly); | ['sqlobject/src/main/java/org/jdbi/v3/sqlobject/transaction/internal/TransactionDecorator.java', 'core/src/test/java/org/jdbi/v3/core/TestHandle.java', 'core/src/main/java/org/jdbi/v3/core/Jdbi.java', 'core/src/main/java/org/jdbi/v3/core/Handle.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 1,185,510 | 248,249 | 32,783 | 438 | 1,102 | 186 | 20 | 3 | 4,260 | 422 | 954 | 105 | 2 | 4 | 2018-12-12T11:03:53 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
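With the change above, the enum's `UNKNOWN` member is simply skipped while a genuinely unsupported raw int still fails fast. A minimal usage sketch mirroring the new `TestHandle` assertions, assuming an in-memory H2 driver on the classpath:

```java
import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.Jdbi;
import org.jdbi.v3.core.transaction.TransactionIsolationLevel;

public class UnknownIsolationDemo {
    public static void main(String[] args) {
        Jdbi jdbi = Jdbi.create("jdbc:h2:mem:demo");
        try (Handle h = jdbi.open()) {
            // No-op after the fix: the UNKNOWN level is never pushed down to the Connection.
            h.setTransactionIsolation(TransactionIsolationLevel.UNKNOWN);

            // So the default @Transaction level no longer breaks a plain transaction either.
            h.inTransaction(TransactionIsolationLevel.UNKNOWN, inner ->
                    inner.execute("create table sample (id int primary key)"));
        }
    }
}
```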
24 | jdbi/jdbi/1291/1283 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/1283 | https://github.com/jdbi/jdbi/pull/1291 | https://github.com/jdbi/jdbi/pull/1291 | 1 | fixes | BindMethods doesn't work well with package-private implementations | I have an [`@AutoValue`d](https://github.com/google/auto/tree/master/value) class.
@AutoValue
public abstract class Person {
public static Person create(String name) { return new AutoValue_Person(name); }
public abstract String name();
}
This creates a package-private class named `AutoValue_Person`.
I'm trying to use it with the following SqlObject method:
@SqlUpdate("INSERT INTO person (name) VALUES (:name)")
void addPerson(@BindMethods Person person);
But it fails with the following message:
java.lang.IllegalAccessException: Class org.jdbi.v3.core.argument.MethodReturnValueNamedArgumentFinder can not access a member of class example.model.AutoValue_Person with modifiers "public"
at sun.reflect.Reflection.ensureMemberAccess(Reflection.java:102)
at java.lang.reflect.AccessibleObject.slowCheckMemberAccess(AccessibleObject.java:296)
at java.lang.reflect.AccessibleObject.checkAccess(AccessibleObject.java:288)
at java.lang.reflect.Method.invoke(Method.java:491)
at org.jdbi.v3.core.argument.MethodReturnValueNamedArgumentFinder.invokeMethod(MethodReturnValueNamedArgumentFinder.java:37)
The reason is that the methods are extracted from the actual object, not from the given interface. I want the methods from `Person` to be called, not from `AutoValue_Person`: calling `AutoValue_Person`'s method is Java's job, not yours, especially since jdbi wants to work in a SecurityManager-ed environment.
So please use the interface I provide in my SqlObject to call the methods, not the objects themselves.
To prove my point that there is a difference between the two and that calling the interface works, I created the following test case that you can adapt so you don't have to use Google's `@AutoValue`:
**`example/Controller.java`**
package example;
import example.model.*;
import java.lang.reflect.*;
public class Controller {
public static void main(String[] args) throws Exception {
Person jack = Person.create("Jack");
System.out.print("Call using jack.name(): ");
System.out.println(jack.name());
call(jack, Person.class, "name");
call(jack, jack.getClass(), "name");
}
private static void call(Object instance, Class type, String methodName) throws Exception {
Method method = type.getMethod(methodName);
System.out.printf("Call using %s.getMethod(%s).invoke(instance): ", type, methodName);
System.out.println(method.invoke(instance));
}
}
**`example/model/Person.java`**
package example.model;
public abstract class Person {
public static Person create(String name) { return new PersonImpl(name); }
public abstract String name();
}
**`example/model/PersonImpl.java`**
package example.model;
class PersonImpl extends Person {
private String name;
PersonImpl(String name) { this.name = name; }
@Override public String name() { return name; }
}
The result is the following:
Call using jack.name(): Jack
Call using class example.model.Person.getMethod(name).invoke(instance): Jack
Call using class example.model.PersonImpl.getMethod(name).invoke(instance): Exception in thread "main" java.lang.IllegalAccessException: Class example.Controller can not access a member of class example.model.PersonImpl with modifiers "public"
at sun.reflect.Reflection.ensureMemberAccess(Reflection.java:102)
at java.lang.reflect.AccessibleObject.slowCheckMemberAccess(AccessibleObject.java:296)
at java.lang.reflect.AccessibleObject.checkAccess(AccessibleObject.java:288)
at java.lang.reflect.Method.invoke(Method.java:491)
at example.Controller.call(Controller.java:15)
at example.Controller.main(Controller.java:10)
So when you call the method, please use the method I provide in my interface (`Person`) and not the method provided in my implementation (`PersonImpl` or `AutoValue_Person`). This will work without any issue without ever calling `setAccessible(true)`. | aa6714e1feb2371263990f323641617ee12ef399 | 6ba34e1c6c211a8a0ea63edc6017cf897424a233 | https://github.com/jdbi/jdbi/compare/aa6714e1feb2371263990f323641617ee12ef399...6ba34e1c6c211a8a0ea63edc6017cf897424a233 | diff --git a/core/src/main/java/org/jdbi/v3/core/argument/ObjectMethodArguments.java b/core/src/main/java/org/jdbi/v3/core/argument/ObjectMethodArguments.java
index 073a4cc70..c40cbcd70 100644
--- a/core/src/main/java/org/jdbi/v3/core/argument/ObjectMethodArguments.java
+++ b/core/src/main/java/org/jdbi/v3/core/argument/ObjectMethodArguments.java
@@ -14,8 +14,10 @@
package org.jdbi.v3.core.argument;
import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.util.Arrays;
+import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
@@ -32,10 +34,7 @@ public class ObjectMethodArguments extends MethodReturnValueNamedArgumentFinder
private static final Map<Class<?>, Map<String, Method>> CLASS_METHODS = ExpiringMap.builder()
.expiration(10, TimeUnit.MINUTES)
.expirationPolicy(ExpirationPolicy.ACCESSED)
- .entryLoader((Class<?> type) ->
- Arrays.stream(type.getMethods())
- .filter(m -> m.getParameterCount() == 0)
- .collect(Collectors.toMap(Method::getName, Function.identity(), ObjectMethodArguments::bridgeMethodMerge)))
+ .entryLoader(ObjectMethodArguments::load)
.build();
private final Map<String, Method> methods;
@@ -50,6 +49,20 @@ public class ObjectMethodArguments extends MethodReturnValueNamedArgumentFinder
this.methods = CLASS_METHODS.get(object.getClass());
}
+ private static Map<String, Method> load(Class<?> type) {
+ // The package check could be removed if jdbi's tests would only use public classes for argument testing
+ if (Modifier.isPublic(type.getModifiers()) || type.getPackage().equals(ObjectMethodArguments.class.getPackage())) {
+ return Arrays.stream(type.getMethods())
+ .filter(m -> m.getParameterCount() == 0)
+ .collect(Collectors.toMap(Method::getName, Function.identity(), ObjectMethodArguments::bridgeMethodMerge));
+ } else {
+ final HashMap<String, Method> methodMap = new HashMap<>();
+ Optional.ofNullable(type.getSuperclass()).ifPresent(superclass -> methodMap.putAll(load(superclass)));
+ Arrays.stream(type.getInterfaces()).forEach(interfaceClass -> methodMap.putAll(load(interfaceClass)));
+ return methodMap;
+ }
+ }
+
@Override
Optional<TypedValue> getValue(String name, StatementContext ctx) {
Method method = methods.get(name);
diff --git a/core/src/test/java/org/jdbi/v3/core/argument/TestBeanArguments.java b/core/src/test/java/org/jdbi/v3/core/argument/TestBeanArguments.java
index f71590291..e3b3e99b4 100644
--- a/core/src/test/java/org/jdbi/v3/core/argument/TestBeanArguments.java
+++ b/core/src/test/java/org/jdbi/v3/core/argument/TestBeanArguments.java
@@ -204,4 +204,58 @@ public class TestBeanArguments {
verify(stmt).setLong(3, 69);
}
+
+ @Test
+ public void testPrivateClass() throws Exception {
+ new ObjectMethodArguments(null, Person.create("hello")).find("name", ctx).get().apply(4, stmt, null);
+ verify(stmt).setString(4, "hello");
+ }
+
+ @Test
+ public void testPrivateInterfaceClass() throws Exception {
+ new ObjectMethodArguments(null, Car.create("hello")).find("name", ctx).get().apply(4, stmt, null);
+ verify(stmt).setString(4, "hello");
+ }
+
+ public abstract static class Person {
+ public static Person create(String name) {
+ return new PersonImpl(name);
+ }
+
+ public abstract String name();
+
+ private static class PersonImpl extends Person {
+ private String name;
+
+ PersonImpl(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public String name() {
+ return name;
+ }
+ }
+ }
+
+ public interface Car {
+ static Car create(String name) {
+ return new CarImpl(name);
+ }
+
+ String name();
+ }
+
+ private static class CarImpl implements Car {
+ private String name;
+
+ CarImpl(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public String name() {
+ return name;
+ }
+ }
} | ['core/src/test/java/org/jdbi/v3/core/argument/TestBeanArguments.java', 'core/src/main/java/org/jdbi/v3/core/argument/ObjectMethodArguments.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,100,309 | 229,490 | 30,449 | 394 | 1,293 | 244 | 21 | 1 | 4,135 | 380 | 847 | 80 | 1 | 0 | 2018-10-27T09:57:56 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
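The patch above sidesteps the access check by resolving zero-argument methods against a public ancestor (superclass or interface) instead of the package-private runtime class. A simplified sketch of that lookup, stripped of jdbi's caching and bridge-method merging, so it is an illustration of the approach rather than the literal patch:

```java
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class AccessibleMethodLookup {
    // Collect no-arg methods from the nearest public types in the hierarchy, so that
    // Method.invoke() never trips Java's access rules on a package-private implementation.
    static Map<String, Method> load(Class<?> type) {
        Map<String, Method> methods = new HashMap<>();
        if (Modifier.isPublic(type.getModifiers())) {
            Arrays.stream(type.getMethods())
                    .filter(m -> m.getParameterCount() == 0)
                    .forEach(m -> methods.put(m.getName(), m));
        } else {
            if (type.getSuperclass() != null) {
                methods.putAll(load(type.getSuperclass()));
            }
            for (Class<?> iface : type.getInterfaces()) {
                methods.putAll(load(iface));
            }
        }
        return methods;
    }
}
```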
27 | jdbi/jdbi/1008/1007 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/1007 | https://github.com/jdbi/jdbi/pull/1008 | https://github.com/jdbi/jdbi/pull/1008 | 2 | fixes | PreparedBatch should clear bindings after execute() | The PreparedBatch class produces duplicate entries in the database when you invoke the execute() method multiple times. This seems to happen only when the bindBean() method is used. The attached file contains a test that illustrates the problem.
[TestPreparedBatch.java.txt](https://github.com/jdbi/jdbi/files/1642755/TestPreparedBatch.java.txt)
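A minimal sketch of the reported usage pattern, reusing one `PreparedBatch` across two `execute()` calls against an in-memory H2 database; before the fix the bindings added for the first execution could be replayed by the second, producing the duplicates described above. The class and bean names here are illustrative, not taken from the attached test.

```java
import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.Jdbi;
import org.jdbi.v3.core.statement.PreparedBatch;

public class PreparedBatchReuse {
    public static class User {
        private final int id;
        private final String name;
        public User(int id, String name) { this.id = id; this.name = name; }
        public int getId() { return id; }
        public String getName() { return name; }
    }

    public static void main(String[] args) {
        Jdbi jdbi = Jdbi.create("jdbc:h2:mem:batchdemo");
        try (Handle h = jdbi.open()) {
            h.execute("create table users (id int primary key, name varchar)");

            PreparedBatch batch = h.prepareBatch("insert into users (id, name) values (:id, :name)");
            batch.bindBean(new User(1, "Eric")).add();
            batch.execute();                          // inserts id = 1

            batch.bindBean(new User(2, "Brian")).add();
            batch.execute();                          // with the fix, only id = 2 is added here

            int rows = h.createQuery("select count(*) from users")
                    .mapTo(Integer.class)
                    .findFirst()
                    .orElse(0);
            System.out.println(rows);                 // 2
        }
    }
}
```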
| cb51f6959c8d29d2b3279de3d9035d2960a808f2 | af3f93643fe9906712fd4b1d77491ba43fdb8cf6 | https://github.com/jdbi/jdbi/compare/cb51f6959c8d29d2b3279de3d9035d2960a808f2...af3f93643fe9906712fd4b1d77491ba43fdb8cf6 | diff --git a/core/src/main/java/org/jdbi/v3/core/statement/PreparedBatch.java b/core/src/main/java/org/jdbi/v3/core/statement/PreparedBatch.java
index c75687074..26e517966 100644
--- a/core/src/main/java/org/jdbi/v3/core/statement/PreparedBatch.java
+++ b/core/src/main/java/org/jdbi/v3/core/statement/PreparedBatch.java
@@ -189,6 +189,8 @@ public class PreparedBatch extends SqlStatement<PreparedBatch> implements Result
afterExecution(stmt);
+ getContext().setBinding(new Binding());
+
return new ExecutedBatch(stmt, rs);
}
catch (SQLException e) {
diff --git a/core/src/test/java/org/jdbi/v3/core/statement/TestPreparedBatch.java b/core/src/test/java/org/jdbi/v3/core/statement/TestPreparedBatch.java
index 72ecedc9e..a16f09aec 100644
--- a/core/src/test/java/org/jdbi/v3/core/statement/TestPreparedBatch.java
+++ b/core/src/test/java/org/jdbi/v3/core/statement/TestPreparedBatch.java
@@ -16,6 +16,7 @@ package org.jdbi.v3.core.statement;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
+import java.beans.ConstructorProperties;
import java.util.List;
import java.util.Map;
@@ -23,6 +24,7 @@ import com.google.common.collect.ImmutableMap;
import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.Something;
+import org.jdbi.v3.core.mapper.reflect.ConstructorMapper;
import org.jdbi.v3.core.rule.H2DatabaseRule;
import org.junit.After;
import org.junit.Before;
@@ -172,4 +174,79 @@ public class TestPreparedBatch
assertThat(ctx.getBinding().findForName("name", ctx).toString()).contains("bob");
}
}
+
+ @Test
+ public void testMultipleExecuteBindBean()
+ {
+ final PreparedBatch b = h.prepareBatch("insert into something (id, name) values (:id, :name)");
+
+ b.bindBean(new Something(1, "Eric")).add();
+ b.bindBean(new Something(2, "Brian")).add();
+ b.execute();
+
+ // bindings should be cleared after execute()
+
+ b.bindBean(new Something(3, "Keith")).add();
+ b.execute();
+
+ final List<Something> r = h.createQuery("select * from something order by id").mapToBean(Something.class).list();
+ assertThat(r).hasSize(3);
+ assertThat(r.get(0)).extracting(Something::getId, Something::getName).containsSequence(1, "Eric");
+ assertThat(r.get(1)).extracting(Something::getId, Something::getName).containsSequence(2, "Brian");
+ assertThat(r.get(2)).extracting(Something::getId, Something::getName).containsSequence(3, "Keith");
+ }
+
+ @Test
+ public void testMultipleExecuteBind()
+ {
+ final PreparedBatch b = h.prepareBatch("insert into something (id, name) values (:id, :name)");
+
+ b.bind("id", 1).bind("name", "Eric").add();
+ b.bind("id", 2).bind("name", "Brian").add();
+ b.execute();
+
+ // bindings should be cleared after execute()
+
+ b.bind("id", 3).bind("name", "Keith").add();
+ b.execute();
+
+ final List<Something> r = h.createQuery("select * from something order by id").mapToBean(Something.class).list();
+ assertThat(r).hasSize(3);
+ assertThat(r.get(0)).extracting(Something::getId, Something::getName).containsSequence(1, "Eric");
+ assertThat(r.get(1)).extracting(Something::getId, Something::getName).containsSequence(2, "Brian");
+ assertThat(r.get(2)).extracting(Something::getId, Something::getName).containsSequence(3, "Keith");
+ }
+
+ @Test
+ public void testMultipleExecuteBindFields()
+ {
+ h.registerRowMapper(ConstructorMapper.factory(PublicSomething.class));
+ final PreparedBatch b = h.prepareBatch("insert into something (id, name) values (:id, :name)");
+
+ b.bindFields(new PublicSomething(1, "Eric")).add();
+ b.bindFields(new PublicSomething(2, "Brian")).add();
+ b.execute();
+
+ // bindings should be cleared after execute()
+
+ b.bindFields(new PublicSomething(3, "Keith")).add();
+ b.execute();
+
+ final List<PublicSomething> r = h.createQuery("select * from something order by id").mapTo(PublicSomething.class).list();
+ assertThat(r).hasSize(3);
+ assertThat(r.get(0)).extracting(s -> s.id, s -> s.name).containsSequence(1, "Eric");
+ assertThat(r.get(1)).extracting(s -> s.id, s -> s.name).containsSequence(2, "Brian");
+ assertThat(r.get(2)).extracting(s -> s.id, s -> s.name).containsSequence(3, "Keith");
+ }
+
+ public static class PublicSomething {
+ public int id;
+ public String name;
+
+ @ConstructorProperties({"id", "name"})
+ public PublicSomething(Integer id, String name) {
+ this.id = id;
+ this.name = name;
+ }
+ }
} | ['core/src/test/java/org/jdbi/v3/core/statement/TestPreparedBatch.java', 'core/src/main/java/org/jdbi/v3/core/statement/PreparedBatch.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 955,116 | 200,145 | 26,850 | 348 | 58 | 8 | 2 | 1 | 353 | 39 | 75 | 6 | 1 | 0 | 2018-01-20T00:29:49 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
26 | jdbi/jdbi/1066/497 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/497 | https://github.com/jdbi/jdbi/pull/1066 | https://github.com/jdbi/jdbi/pull/1066 | 1 | fixes | JDK9 will block setAccessible on modules | We're using `setAccessible(true)` to get reflective access to private API in a couple places. We cannot count on this after JDK9 is released.
In particular, `DefaultMethodHandler` in SQL Object is using `setAccessible` to get access to a `MethodHandles.Lookup` with private-member access, so that we can invoke the default ("super") method.
See:
- http://jigsaw-dev.1059479.n5.nabble.com/creating-proxies-for-interfaces-with-default-methods-td5711955.html
- http://mlvm-dev.openjdk.java.narkive.com/ocxrCtZV/proxy-an-interface-and-call-a-default-method
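On JDK 9+ the same capability is available without the hack, via `MethodHandles.privateLookupIn`. A minimal self-contained sketch of invoking an interface default method through a dynamic proxy that way; the `Greeter` interface is made up for illustration and is not part of jdbi.

```java
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

public class DefaultMethodProxy {
    public interface Greeter {
        default String greet() {
            return "hello";
        }
    }

    public static void main(String[] args) {
        Greeter greeter = (Greeter) Proxy.newProxyInstance(
                Greeter.class.getClassLoader(),
                new Class<?>[] {Greeter.class},
                DefaultMethodProxy::invokeDefault);
        System.out.println(greeter.greet()); // "hello", executed via the interface's default body
    }

    // privateLookupIn (JDK 9+) grants the private-access lookup needed for unreflectSpecial,
    // replacing the setAccessible trick on MethodHandles.Lookup's private constructor.
    static Object invokeDefault(Object proxy, Method method, Object[] args) throws Throwable {
        Class<?> declaring = method.getDeclaringClass();
        MethodHandle handle = MethodHandles.privateLookupIn(declaring, MethodHandles.lookup())
                .unreflectSpecial(method, declaring)
                .bindTo(proxy);
        return args == null ? handle.invoke() : handle.invokeWithArguments(args);
    }
}
```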
| fe097743f63cfc7218ffc30f143a2d23fec186cf | f251de36a5196c1ad2033865f052d8ac411540d3 | https://github.com/jdbi/jdbi/compare/fe097743f63cfc7218ffc30f143a2d23fec186cf...f251de36a5196c1ad2033865f052d8ac411540d3 | diff --git a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/DefaultMethodHandler.java b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/DefaultMethodHandler.java
index 9a0db8f47..4727a6a2d 100644
--- a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/DefaultMethodHandler.java
+++ b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/DefaultMethodHandler.java
@@ -18,6 +18,7 @@ import static java.util.Collections.synchronizedMap;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.WeakHashMap;
@@ -25,22 +26,47 @@ import java.util.WeakHashMap;
import org.jdbi.v3.core.extension.HandleSupplier;
class DefaultMethodHandler implements Handler {
+ // MethodHandles.privateLookupIn(Class, Lookup) was added in JDK 9.
+ // JDK 9 allows us to unreflectSpecial() on an interface default method, where JDK 8 did not.
+ private static final Method PRIVATE_LOOKUP_IN = privateLookupIn();
+
+ private static Method privateLookupIn() {
+ try {
+ return MethodHandles.class.getMethod("privateLookupIn", Class.class, MethodHandles.Lookup.class);
+ } catch (NoSuchMethodException e) {
+ // Method was added in JDK 9
+ return null;
+ }
+ }
+
private static final Map<Class<?>, MethodHandles.Lookup> privateLookups = synchronizedMap(new WeakHashMap<>());
private static MethodHandles.Lookup lookupFor(Class<?> clazz) {
- return privateLookups.computeIfAbsent(clazz, type -> {
+ if (PRIVATE_LOOKUP_IN != null) {
try {
- // TERRIBLE, HORRIBLE, NO GOOD, VERY BAD HACK
- // Courtesy of:
- // https://rmannibucau.wordpress.com/2014/03/27/java-8-default-interface-methods-and-jdk-dynamic-proxies/
+ return (MethodHandles.Lookup) PRIVATE_LOOKUP_IN.invoke(null, clazz, MethodHandles.lookup());
+ } catch (IllegalAccessException | InvocationTargetException e) {
+ String message = String.format(
+ "Error invoking MethodHandles.privateLookupIn(%s.class, MethodHandles.lookup()) in JDK 9+ runtime",
+ clazz);
+ throw new RuntimeException(message, e);
+ }
+ }
+
+ // TERRIBLE, HORRIBLE, NO GOOD, VERY BAD HACK
+ // Courtesy of:
+ // https://rmannibucau.wordpress.com/2014/03/27/java-8-default-interface-methods-and-jdk-dynamic-proxies/
- // We can use MethodHandles to look up and invoke the super method, but since this class is not an
- // implementation of method.getDeclaringClass(), MethodHandles.Lookup will throw an exception since
- // this class doesn't have access to the super method, according to Java's access rules. This horrible,
- // awful workaround allows us to directly invoke MethodHandles.Lookup's private constructor, bypassing
- // the usual access checks.
+ // We can use MethodHandles to look up and invoke the super method, but since this class is not an
+ // implementation of method.getDeclaringClass(), MethodHandles.Lookup will throw an exception since
+ // this class doesn't have access to the super method, according to Java's access rules. This horrible,
+ // awful workaround allows us to directly invoke MethodHandles.Lookup's private constructor, bypassing
+ // the usual access checks.
- // We should get rid of this workaround as soon as a viable alternative exists.
+ // This workaround is only used in JDK 8.x runtimes. JDK 9+ runtimes use MethodHandles.privateLookupIn()
+ // above.
+ return privateLookups.computeIfAbsent(clazz, type -> {
+ try {
final Constructor<MethodHandles.Lookup> constructor =
MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class); | ['sqlobject/src/main/java/org/jdbi/v3/sqlobject/DefaultMethodHandler.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,011,897 | 211,075 | 28,641 | 358 | 2,915 | 581 | 46 | 1 | 549 | 55 | 139 | 8 | 2 | 0 | 2018-04-02T22:50:59 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
29 | jdbi/jdbi/783/781 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/781 | https://github.com/jdbi/jdbi/pull/783 | https://github.com/jdbi/jdbi/pull/783 | 2 | fixed | Unable generate keys for a not prepared batch if inserted list size to contain BatchChunkSize | @SqlBatch("INSERT INTO users (name) VALUES (:name)")
@GetGeneratedKeys
public abstract int[] insertBatch(@BindBean List<User> users, @BatchChunkSize int chunk);
This finished with `java.lang.IllegalArgumentException: Unable generate keys for a not prepared batch` if the last chunk was empty, i.e. `parts.size() == 0`, which happens when the size of the inserted list is an exact multiple of the `BatchChunkSize`.
```
java.lang.IllegalArgumentException: Unable generate keys for a not prepared batch
at org.skife.jdbi.v2.PreparedBatch.internalBatchExecute(PreparedBatch.java:147)
at org.skife.jdbi.v2.PreparedBatch.executeAndGenerateKeys(PreparedBatch.java:105)
at org.skife.jdbi.v2.PreparedBatch.executeAndGenerateKeys(PreparedBatch.java:134)
at org.skife.jdbi.v2.sqlobject.BatchHandler$2.value(BatchHandler.java:75)
at org.skife.jdbi.v2.sqlobject.BatchHandler$5.inTransaction(BatchHandler.java:231)
at org.skife.jdbi.v2.sqlobject.BatchHandler$5.inTransaction(BatchHandler.java:227)
at org.skife.jdbi.v2.tweak.transactions.LocalTransactionHandler.inTransaction(LocalTransactionHandler.java:183)
at org.skife.jdbi.v2.BasicHandle.inTransaction(BasicHandle.java:338)
at org.skife.jdbi.v2.sqlobject.BatchHandler.executeBatch(BatchHandler.java:226)
at org.skife.jdbi.v2.sqlobject.BatchHandler.invoke(BatchHandler.java:204)
at org.skife.jdbi.v2.sqlobject.SqlObject.invoke(SqlObject.java:224)
at org.skife.jdbi.v2.sqlobject.SqlObject$3.intercept(SqlObject.java:133)
at
```
what for `PreparedBatch` line 146-148 ? | e4d8abea814a31caba31482ffa680a543dbce718 | dca07d5cde0565a8d210afdbdef147bd5b155c24 | https://github.com/jdbi/jdbi/compare/e4d8abea814a31caba31482ffa680a543dbce718...dca07d5cde0565a8d210afdbdef147bd5b155c24 | diff --git a/src/main/java/org/skife/jdbi/v2/sqlobject/BatchHandler.java b/src/main/java/org/skife/jdbi/v2/sqlobject/BatchHandler.java
index b4210b644..a8c19c8a8 100644
--- a/src/main/java/org/skife/jdbi/v2/sqlobject/BatchHandler.java
+++ b/src/main/java/org/skife/jdbi/v2/sqlobject/BatchHandler.java
@@ -201,7 +201,9 @@ class BatchHandler extends CustomizingStatementHandler
}
//execute the rest
- rs_parts.add(executeBatch(handle, batch));
+ if (batch.getSize() > 0) {
+ rs_parts.add(executeBatch(handle, batch));
+ }
// combine results
int end_size = 0;
diff --git a/src/test/java/org/skife/jdbi/v2/sqlobject/TestBatching.java b/src/test/java/org/skife/jdbi/v2/sqlobject/TestBatching.java
index 876414065..e04a905d2 100644
--- a/src/test/java/org/skife/jdbi/v2/sqlobject/TestBatching.java
+++ b/src/test/java/org/skife/jdbi/v2/sqlobject/TestBatching.java
@@ -13,6 +13,7 @@
*/
package org.skife.jdbi.v2.sqlobject;
+import java.util.Collections;
import org.h2.jdbcx.JdbcDataSource;
import org.junit.After;
import org.junit.Before;
@@ -30,6 +31,7 @@ import java.util.List;
import java.util.UUID;
import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public class TestBatching
@@ -166,6 +168,13 @@ public class TestBatching
b.invalidInsertString("bob");
}
+ @Test
+ public void testEmptyBatchWithGeneratedKeys() {
+ UsesBatching b = handle.attach(UsesBatching.class);
+ int[] updateCounts = b.insertChunkedGetKeys();
+ assertEquals(0, updateCounts.length);
+ }
+
@BatchChunkSize(4)
@UseStringTemplate3StatementLocator
public static interface UsesBatching
@@ -189,6 +198,10 @@ public class TestBatching
@SqlBatch
public int[] insertChunked(@BatchChunkSize int size, @BindBean("it") Iterable<Something> its);
+ @SqlBatch("insert into something (id, name) values (:id, :name)")
+ @GetGeneratedKeys(columnName = "id")
+ public int[] insertChunkedGetKeys(Something... values);
+
@SqlQuery("select count(*) from something")
public int size();
| ['src/test/java/org/skife/jdbi/v2/sqlobject/TestBatching.java', 'src/main/java/org/skife/jdbi/v2/sqlobject/BatchHandler.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 710,537 | 145,105 | 22,015 | 262 | 154 | 35 | 4 | 1 | 1,474 | 78 | 398 | 24 | 0 | 1 | 2017-03-28T00:20:28 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
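A standalone sketch of the chunking guard the patch adds: when the input size is an exact multiple of the chunk size, the trailing "rest" batch is empty and must not be executed, since retrieving generated keys for a batch with no bound parts is what raised the exception above. Names here are illustrative, not jdbi internals.

```java
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

public class ChunkedBatchGuard {
    static <T> void executeInChunks(List<T> items, int chunkSize, Consumer<List<T>> executeBatch) {
        int from = 0;
        while (from + chunkSize <= items.size()) {
            executeBatch.accept(items.subList(from, from + chunkSize));
            from += chunkSize;
        }
        List<T> rest = items.subList(from, items.size());
        if (!rest.isEmpty()) { // the missing check: skip an empty trailing chunk
            executeBatch.accept(rest);
        }
    }

    public static void main(String[] args) {
        List<Integer> eight = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8);
        // With a chunk size of 4 this executes exactly two batches and never an empty third one.
        executeInChunks(eight, 4, chunk -> System.out.println("executing " + chunk));
    }
}
```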
30 | jdbi/jdbi/776/767 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/767 | https://github.com/jdbi/jdbi/pull/776 | https://github.com/jdbi/jdbi/pull/776 | 2 | fixes | SQL object types with overridden superinterface methods are treated as default | ```java
public interface BaseDao<T> {
void update(T entity);
}
public interface FooDao extends BaseDao<Foo> {
@Override
@SqlUpdate("update foo set name = :name where id = :id")
void update(@BindBean Foo foo);
}
```
At runtime, `Method.isDefault()` actually does return true for `FooDao.update()`, despite it not looking like a default method.
The trick is that, because the override accepts a more specific type (Foo) than the generic parameter on the superinterface (T, which is erased to Object), the compiler also generates a synthetic bridge method with the erased signature that merely forwards the request to the more specific method, and on an interface that generated method reports itself as default.
We should update the `isDefault()` check for annotated methods, to let bridge methods through. | fcf4a38d0d294c21a76688059373f7c95238a6f9 | a76356992aa81cc249b74f891101cc725c5e1217 | https://github.com/jdbi/jdbi/compare/fcf4a38d0d294c21a76688059373f7c95238a6f9...a76356992aa81cc249b74f891101cc725c5e1217 | diff --git a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/SqlMethodHandlerFactory.java b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/SqlMethodHandlerFactory.java
index b67aa8a48..7be96298f 100644
--- a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/SqlMethodHandlerFactory.java
+++ b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/SqlMethodHandlerFactory.java
@@ -13,6 +13,8 @@
*/
package org.jdbi.v3.sqlobject;
+import static java.util.stream.Collectors.toList;
+
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@@ -20,8 +22,6 @@ import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
-import static java.util.stream.Collectors.toList;
-
public class SqlMethodHandlerFactory implements HandlerFactory {
@Override
public Optional<Handler> buildHandler(Class<?> sqlObjectType, Method method) {
@@ -42,7 +42,7 @@ public class SqlMethodHandlerFactory implements HandlerFactory {
sqlMethodAnnotations));
}
- if (method.isDefault()) {
+ if (method.isDefault() && !method.isSynthetic()) {
throw new IllegalStateException(String.format(
"Default method %s.%s has @%s annotation. " +
"SQL object methods may be default, or have a SQL method annotation, but not both.",
diff --git a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestSqlObject.java b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestSqlObject.java
index f83131681..fb7f1ab1d 100644
--- a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestSqlObject.java
+++ b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestSqlObject.java
@@ -28,8 +28,10 @@ import org.jdbi.v3.core.Something;
import org.jdbi.v3.core.mapper.SomethingMapper;
import org.jdbi.v3.core.rule.H2DatabaseRule;
import org.jdbi.v3.core.transaction.TransactionException;
+import org.jdbi.v3.sqlobject.config.RegisterBeanMapper;
import org.jdbi.v3.sqlobject.config.RegisterRowMapper;
import org.jdbi.v3.sqlobject.customizer.Bind;
+import org.jdbi.v3.sqlobject.customizer.BindBean;
import org.jdbi.v3.sqlobject.customizer.Define;
import org.jdbi.v3.sqlobject.customizer.MaxRows;
import org.jdbi.v3.sqlobject.statement.SqlQuery;
@@ -196,6 +198,13 @@ public class TestSqlObject
assertThat(dao.update(2, "b")).isFalse();
}
+ @Test
+ public void testSubInterfaceOverridesSuperMethods() {
+ SubclassDao dao = handle.attach(SubclassDao.class);
+ dao.insert(new Something(1, "foo"));
+ assertThat(dao.get(1)).isEqualTo(new Something(1, "foo"));
+ }
+
@RegisterRowMapper(SomethingMapper.class)
public interface Dao extends SqlObject
{
@@ -283,4 +292,20 @@ public class TestSqlObject
return "foo";
}
}
+
+ public interface BaseDao<T> {
+ void insert(T obj);
+ T get(long id);
+ }
+
+ public interface SubclassDao extends BaseDao<Something> {
+ @Override
+ @SqlUpdate("insert into something (id, name) values (:id, :name)")
+ void insert(@BindBean Something something);
+
+ @Override
+ @SqlQuery("select * from something where id = :id")
+ @RegisterBeanMapper(Something.class)
+ Something get(long id);
+ }
} | ['sqlobject/src/main/java/org/jdbi/v3/sqlobject/SqlMethodHandlerFactory.java', 'sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestSqlObject.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 750,058 | 155,239 | 21,140 | 253 | 200 | 39 | 6 | 1 | 719 | 107 | 160 | 17 | 0 | 1 | 2017-03-23T05:14:44 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
31 | jdbi/jdbi/773/299 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/299 | https://github.com/jdbi/jdbi/pull/773 | https://github.com/jdbi/jdbi/pull/773 | 1 | fixes | Unable to set a null value via JDBI and Sybase JConn4 | `@SqlCall("{:retVal= exec sp_CountryCredit_Insert :clientID, :country, :user}")
OutParameters insertClientCountryCredit(@Bind("clientID") long clientID, @Bind("country") String country, @Bind("user") String user);`
When `country` is bound to NULL, I'm getting
> JZ0SL: Unsupported SQL type 0.
After a closer look, JDBI is trying to do
> statement.setNull(position, Types.NULL);
I override it to
> statement.setNull(position, Types.VARCHAR);
and then it seems to work. Is there anything that I'm missing? If not, is it possible to make jdbi smarter to accommodate this situation?
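For reference, the patch below makes that override configurable instead of hard-coding `Types.NULL`; a minimal sketch of using the new knob (choosing `Types.VARCHAR` mirrors the reporter's workaround and is otherwise an arbitrary choice):
```java
import java.sql.Types;

import org.jdbi.v3.core.Jdbi;
import org.jdbi.v3.core.argument.Arguments;
import org.jdbi.v3.core.argument.NullArgument;

public class SybaseFriendlyNulls {
    public static Jdbi configure(Jdbi jdbi) {
        // Bind nulls whose type is unknown as VARCHAR instead of Types.NULL so
        // drivers such as Sybase jConnect stop rejecting them with JZ0SL.
        jdbi.getConfig(Arguments.class)
            .setUntypedNullArgument(new NullArgument(Types.VARCHAR));
        return jdbi;
    }
}
```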
| e661609b7d9ec7274655471c2997975dcc3edc50 | e833bbc13e9a3a1ce4e356640eb3af501a2e8ad4 | https://github.com/jdbi/jdbi/compare/e661609b7d9ec7274655471c2997975dcc3edc50...e833bbc13e9a3a1ce4e356640eb3af501a2e8ad4 | diff --git a/core/src/main/java/org/jdbi/v3/core/argument/Arguments.java b/core/src/main/java/org/jdbi/v3/core/argument/Arguments.java
index 0937cfd4e..5add74326 100644
--- a/core/src/main/java/org/jdbi/v3/core/argument/Arguments.java
+++ b/core/src/main/java/org/jdbi/v3/core/argument/Arguments.java
@@ -16,6 +16,7 @@ package org.jdbi.v3.core.argument;
import static org.jdbi.v3.core.internal.JdbiStreams.toStream;
import java.lang.reflect.Type;
+import java.sql.Types;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CopyOnWriteArrayList;
@@ -27,6 +28,7 @@ import org.jdbi.v3.core.config.JdbiConfig;
public class Arguments implements JdbiConfig<Arguments> {
private final List<ArgumentFactory> argumentFactories = new CopyOnWriteArrayList<>();
private ConfigRegistry registry;
+ private Argument untypedNullArgument = new NullArgument(Types.OTHER);
public Arguments() {
register(BuiltInArgumentFactory.INSTANCE);
@@ -40,6 +42,7 @@ public class Arguments implements JdbiConfig<Arguments> {
private Arguments(Arguments that) {
argumentFactories.addAll(that.argumentFactories);
+ untypedNullArgument = that.untypedNullArgument;
}
public Arguments register(ArgumentFactory factory) {
@@ -60,6 +63,25 @@ public class Arguments implements JdbiConfig<Arguments> {
.findFirst();
}
+ /**
+ * Configure the {@link Argument} to use when binding a null
+ * we don't have a type for.
+ * @param untypedNullArgument the argument to bind
+ */
+ public void setUntypedNullArgument(Argument untypedNullArgument) {
+ if (untypedNullArgument == null) {
+ throw new IllegalArgumentException("the Argument itself may not be null");
+ }
+ this.untypedNullArgument = untypedNullArgument;
+ }
+
+ /**
+ * @return the untyped null argument
+ */
+ public Argument getUntypedNullArgument() {
+ return untypedNullArgument;
+ }
+
@Override
public Arguments createCopy() {
return new Arguments(this);
diff --git a/core/src/main/java/org/jdbi/v3/core/argument/BuiltInArgumentFactory.java b/core/src/main/java/org/jdbi/v3/core/argument/BuiltInArgumentFactory.java
index 0e2cf94ac..c1ef69e6f 100644
--- a/core/src/main/java/org/jdbi/v3/core/argument/BuiltInArgumentFactory.java
+++ b/core/src/main/java/org/jdbi/v3/core/argument/BuiltInArgumentFactory.java
@@ -146,7 +146,7 @@ public class BuiltInArgumentFactory implements ArgumentFactory {
}
return value == null
- ? Optional.of(new NullArgument(Types.NULL))
+ ? Optional.of(config.get(Arguments.class).getUntypedNullArgument())
: Optional.empty();
}
diff --git a/core/src/test/java/org/jdbi/v3/core/argument/TestMapArguments.java b/core/src/test/java/org/jdbi/v3/core/argument/TestMapArguments.java
index 77de2e052..09819c310 100644
--- a/core/src/test/java/org/jdbi/v3/core/argument/TestMapArguments.java
+++ b/core/src/test/java/org/jdbi/v3/core/argument/TestMapArguments.java
@@ -21,7 +21,6 @@ import java.sql.Types;
import java.util.Collections;
import java.util.Map;
-import org.jdbi.v3.core.argument.MapArguments;
import org.jdbi.v3.core.statement.StatementContext;
import org.jdbi.v3.core.statement.StatementContextAccess;
import org.junit.Rule;
@@ -55,6 +54,6 @@ public class TestMapArguments
Map<String, Object> args = Collections.singletonMap("foo", null);
new MapArguments(args, ctx).find("foo").get().apply(3, stmt, null);
- verify(stmt).setNull(3, Types.NULL);
+ verify(stmt).setNull(3, Types.OTHER);
}
} | ['core/src/main/java/org/jdbi/v3/core/argument/BuiltInArgumentFactory.java', 'core/src/test/java/org/jdbi/v3/core/argument/TestMapArguments.java', 'core/src/main/java/org/jdbi/v3/core/argument/Arguments.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 719,317 | 148,181 | 20,409 | 249 | 909 | 193 | 24 | 2 | 605 | 76 | 160 | 17 | 0 | 0 | 2017-03-15T00:46:49 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
32 | jdbi/jdbi/653/643 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/643 | https://github.com/jdbi/jdbi/pull/653 | https://github.com/jdbi/jdbi/pull/653 | 2 | fixes | OutParameters are 1-based | While the in parameters of a Call are 0-based the OutParameters are 1-based. Consider the following stored procedure (HSQLDB).
```sql
CREATE procedure plus1inout (IN arg int, OUT res int)
BEGIN ATOMIC
SET res = arg + 1;
END
/;
```
You would call this using the following code:
```java
OutParameters outParameters = handle.createCall("call plus1inout(?, ?);")
.bind(0, argument)
.registerOutParameter(1, Types.INTEGER)
.invoke();
return outParameters.getInt(2);
```
Notice how for binding we use 1 as the index of the out parameter, but for retrieving we use 2 as the index for the same out parameter. | ac28e65d3074822a696f7fa800fe04a6c9cbfdbd | 8d0b70a36260be60d1bef5aec8b623365a217d07 | https://github.com/jdbi/jdbi/compare/ac28e65d3074822a696f7fa800fe04a6c9cbfdbd...8d0b70a36260be60d1bef5aec8b623365a217d07 | diff --git a/core/src/main/java/org/jdbi/v3/core/statement/Call.java b/core/src/main/java/org/jdbi/v3/core/statement/Call.java
index 687972686..04846c12a 100644
--- a/core/src/main/java/org/jdbi/v3/core/statement/Call.java
+++ b/core/src/main/java/org/jdbi/v3/core/statement/Call.java
@@ -94,7 +94,11 @@ public class Call extends SqlStatement<Call>
OutParameters out = new OutParameters();
for ( OutParamArgument param : params ) {
Object obj = param.map((CallableStatement)stmt);
- out.getMap().put(param.position, obj);
+
+ // convert from JDBC 1-based position to JDBI's 0-based
+ int index = param.position - 1;
+ out.getMap().put(index, obj);
+
if ( param.name != null ) {
out.getMap().put(param.name, obj);
}
diff --git a/core/src/test/java/org/jdbi/v3/core/statement/TestCallable.java b/core/src/test/java/org/jdbi/v3/core/statement/TestCallable.java
index 16c9e347b..ae1432400 100644
--- a/core/src/test/java/org/jdbi/v3/core/statement/TestCallable.java
+++ b/core/src/test/java/org/jdbi/v3/core/statement/TestCallable.java
@@ -21,7 +21,6 @@ import java.sql.Types;
import org.assertj.core.data.Offset;
import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.rule.H2DatabaseRule;
-import org.jdbi.v3.core.statement.OutParameters;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
@@ -49,13 +48,12 @@ public class TestCallable
.bind(1, 100.0d)
.invoke();
- // JDBI oddity : register or bind is 0-indexed, which JDBC is 1-indexed.
Double expected = Math.toDegrees(100.0d);
- assertThat(ret.getDouble(1)).isEqualTo(expected, Offset.offset(0.001));
- assertThat(ret.getLong(1).longValue()).isEqualTo(expected.longValue());
- assertThat(ret.getShort(1).shortValue()).isEqualTo(expected.shortValue());
- assertThat(ret.getInt(1).intValue()).isEqualTo(expected.intValue());
- assertThat(ret.getFloat(1).floatValue()).isEqualTo(expected.floatValue(), Offset.offset(0.001f));
+ assertThat(ret.getDouble(0)).isEqualTo(expected, Offset.offset(0.001));
+ assertThat(ret.getLong(0).longValue()).isEqualTo(expected.longValue());
+ assertThat(ret.getShort(0).shortValue()).isEqualTo(expected.shortValue());
+ assertThat(ret.getInt(0).intValue()).isEqualTo(expected.intValue());
+ assertThat(ret.getFloat(0).floatValue()).isEqualTo(expected.floatValue(), Offset.offset(0.001f));
assertThatExceptionOfType(Exception.class).isThrownBy(()->{
ret.getDate(1);
@@ -95,8 +93,7 @@ public class TestCallable
.registerOutParameter(1, Types.VARCHAR)
.invoke();
- // JDBI oddity : register or bind is 0-indexed, which JDBC is 1-indexed.
- String out = ret.getString(2);
+ String out = ret.getString(1);
assertThat(out).isNull();
}
diff --git a/docs/src/test/java/jdbi/doc/CallTest.java b/docs/src/test/java/jdbi/doc/CallTest.java
index 6efb5248c..76fec1149 100644
--- a/docs/src/test/java/jdbi/doc/CallTest.java
+++ b/docs/src/test/java/jdbi/doc/CallTest.java
@@ -28,7 +28,7 @@ public class CallTest
.registerOutParameter(0, Types.INTEGER)
.bind(1, 13)
.invoke()
- .getInt(1))
+ .getInt(0))
.isEqualTo(42);
// end::call[]
} | ['core/src/test/java/org/jdbi/v3/core/statement/TestCallable.java', 'core/src/main/java/org/jdbi/v3/core/statement/Call.java', 'docs/src/test/java/jdbi/doc/CallTest.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 701,922 | 144,163 | 19,990 | 240 | 228 | 47 | 6 | 1 | 645 | 90 | 160 | 21 | 0 | 2 | 2016-12-29T05:59:28 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
15 | jdbi/jdbi/1762/1761 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/1761 | https://github.com/jdbi/jdbi/pull/1762 | https://github.com/jdbi/jdbi/pull/1762 | 1 | fixes | NullPointerException in SqlBatch with a constant parameter value of null | I'm trying to execute an `@SqlBatch` with a constant parameter value of `null`. This causes a `NullPointerException` in JDBI while testing if arguments are iterable-like.
This code reproduces the issue:
```java
public interface Sql {
// create table user (tenant_id varchar(20), name varchar(20))
@SqlBatch("insert into user(tenant_id, name) values (:tenantId, :name);")
void insert(String tenantId, @Bind("name") List<String> names);
}
Jdbi jdbi = Jdbi.create(connectionUrl);
jdbi.installPlugin(new SqlObjectPlugin());
jdbi.useHandle(handle -> {
Sql sql = handle.attach(Sql.class);
sql.insert(null, asList("foo", "bar"));
});
```
Stacktrace:
```
java.lang.NullPointerException
at org.jdbi.v3.core.internal.IterableLike.isIterable(IterableLike.java:48)
at org.jdbi.v3.sqlobject.statement.internal.SqlBatchHandler.zipArgs(SqlBatchHandler.java:269)
at org.jdbi.v3.sqlobject.statement.internal.SqlBatchHandler.invoke(SqlBatchHandler.java:174)
at org.jdbi.v3.sqlobject.internal.SqlObjectInitData$1.lambda$invoke$0(SqlObjectInitData.java:125)
at org.jdbi.v3.core.internal.Invocations.invokeWith(Invocations.java:44)
at org.jdbi.v3.core.ConstantHandleSupplier.lambda$invokeInContext$0(ConstantHandleSupplier.java:56)
at org.jdbi.v3.core.internal.Invocations.invokeWith(Invocations.java:44)
at org.jdbi.v3.core.ConstantHandleSupplier.invokeInContext(ConstantHandleSupplier.java:52)
at org.jdbi.v3.sqlobject.internal.SqlObjectInitData$1.call(SqlObjectInitData.java:131)
at org.jdbi.v3.sqlobject.internal.SqlObjectInitData$1.invoke(SqlObjectInitData.java:125)
at org.jdbi.v3.sqlobject.SqlObjectFactory.lambda$attach$2(SqlObjectFactory.java:110)
at com.sun.proxy.$Proxy39.insert(Unknown Source)
``` | ad2ccfcc82cc05cf1a5a99d7fab7e705cc133bed | d5df6a3753459cc375b9021b0836ef4843b5410b | https://github.com/jdbi/jdbi/compare/ad2ccfcc82cc05cf1a5a99d7fab7e705cc133bed...d5df6a3753459cc375b9021b0836ef4843b5410b | diff --git a/core/src/main/java/org/jdbi/v3/core/internal/IterableLike.java b/core/src/main/java/org/jdbi/v3/core/internal/IterableLike.java
index 80b0e3bf2..df982c85b 100644
--- a/core/src/main/java/org/jdbi/v3/core/internal/IterableLike.java
+++ b/core/src/main/java/org/jdbi/v3/core/internal/IterableLike.java
@@ -45,7 +45,7 @@ public class IterableLike {
public static boolean isIterable(Object maybeIterable) {
return maybeIterable instanceof Iterator<?>
|| maybeIterable instanceof Iterable<?>
- || maybeIterable.getClass().isArray();
+ || maybeIterable != null && maybeIterable.getClass().isArray();
}
/**
diff --git a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestBatching.java b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestBatching.java
index 8f2d659fb..bd88f6e4a 100644
--- a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestBatching.java
+++ b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestBatching.java
@@ -67,6 +67,21 @@ public class TestBatching {
assertThat(b.size()).isEqualTo(2);
}
+ @Test
+ public void testBindConstantNull() {
+ UsesBatching b = handle.attach(UsesBatching.class);
+ List<Integer> ids = Arrays.asList(1, 2, 3, 4, 5);
+
+ b.withConstantValue(ids, null);
+
+ assertThat(b.size()).isEqualTo(5);
+
+ List<String> names = handle.createQuery("select distinct name from something")
+ .mapTo(String.class)
+ .list();
+ assertThat(names).containsExactly((String) null);
+ }
+
@Test
public void testBindConstantValue() {
UsesBatching b = handle.attach(UsesBatching.class); | ['core/src/main/java/org/jdbi/v3/core/internal/IterableLike.java', 'sqlobject/src/test/java/org/jdbi/v3/sqlobject/TestBatching.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,444,606 | 305,103 | 40,010 | 529 | 128 | 21 | 2 | 1 | 1,747 | 104 | 459 | 34 | 0 | 2 | 2020-09-24T19:04:26 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
10 | jdbi/jdbi/2077/2040 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/2040 | https://github.com/jdbi/jdbi/pull/2077 | https://github.com/jdbi/jdbi/pull/2077 | 1 | fixes | PaginationParameterCustomizer warm() is never called | I have a custom binding annotation ala:
```java
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.PARAMETER)
@SqlStatementCustomizingAnnotation(PaginationCustomizerFactory.class)
public @interface ApiBind
{
}
```
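For context, a rough sketch (hypothetical body, real Jdbi 3 interfaces) of the kind of parameter customizer such a factory hands back for the `@ApiBind` parameter; the complaint is that `warm()` below is never driven even though `apply()` is:
```java
import org.jdbi.v3.core.config.ConfigRegistry;
import org.jdbi.v3.core.statement.SqlStatement;
import org.jdbi.v3.sqlobject.customizer.SqlStatementParameterCustomizer;

public class PaginationCustomizerSketch {
    // What createForParameter(...) might return (illustrative body only).
    static SqlStatementParameterCustomizer pageCustomizer() {
        return new SqlStatementParameterCustomizer() {
            @Override
            public void warm(ConfigRegistry config) {
                // one-time preparation per SQL object method; per the report this
                // is never invoked for parameter customizers
            }

            @Override
            public void apply(SqlStatement<?> stmt, Object arg) {
                // bind values derived from the pagination argument; this part runs
                stmt.bind("limit", 50);   // illustrative binding only
                stmt.bind("offset", 0);
            }
        };
    }
}
```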
`PaginationCustomizerFactory` implements `createForParameter` and returns a `SqlStatementParameterCustomizer`. The `warm()` method of that parameter customizer is never called (`apply()` is correctly called however). | 18dbd43a5a9b3ccbe711a768bd20ebb3af02bef7 | 2be2f4edae89d3612f99933afb2744017aa4f41f | https://github.com/jdbi/jdbi/compare/18dbd43a5a9b3ccbe711a768bd20ebb3af02bef7...2be2f4edae89d3612f99933afb2744017aa4f41f | diff --git a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/statement/internal/CustomizingStatementHandler.java b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/statement/internal/CustomizingStatementHandler.java
index ac3cffdd8..25fb4d8c0 100644
--- a/sqlobject/src/main/java/org/jdbi/v3/sqlobject/statement/internal/CustomizingStatementHandler.java
+++ b/sqlobject/src/main/java/org/jdbi/v3/sqlobject/statement/internal/CustomizingStatementHandler.java
@@ -75,6 +75,11 @@ abstract class CustomizingStatementHandler<StatementType extends SqlStatement<St
.collect(Collectors.toList());
}
+ @Override
+ public void warm(ConfigRegistry config) {
+ statementCustomizers.forEach(s -> s.warm(config));
+ }
+
// duplicate implementation in SqlObjectFactory
private static Stream<Class<?>> superTypes(Class<?> type) {
Class<?>[] interfaces = type.getInterfaces();
diff --git a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/config/TestUseConfiguredDefaultParameterCustomizerFactory.java b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/config/TestUseConfiguredDefaultParameterCustomizerFactory.java
index 496a0e9b1..9f0de3246 100644
--- a/sqlobject/src/test/java/org/jdbi/v3/sqlobject/config/TestUseConfiguredDefaultParameterCustomizerFactory.java
+++ b/sqlobject/src/test/java/org/jdbi/v3/sqlobject/config/TestUseConfiguredDefaultParameterCustomizerFactory.java
@@ -59,7 +59,9 @@ public class TestUseConfiguredDefaultParameterCustomizerFactory {
SomethingDao h = handle.attach(SomethingDao.class);
h.findByNameAndIdNoBindAnnotation(1, "Joy");
- assertThat(invocationCounter.get()).isEqualTo(2);
+ // factory is called twice for each parameters, once in
+ // warm() and once in apply()
+ assertThat(invocationCounter.get()).isEqualTo(4);
}
@Test | ['sqlobject/src/main/java/org/jdbi/v3/sqlobject/statement/internal/CustomizingStatementHandler.java', 'sqlobject/src/test/java/org/jdbi/v3/sqlobject/config/TestUseConfiguredDefaultParameterCustomizerFactory.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,686,157 | 358,150 | 46,592 | 615 | 130 | 27 | 5 | 1 | 444 | 39 | 92 | 12 | 0 | 1 | 2022-07-25T21:49:40 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
8 | jdbi/jdbi/2198/2197 | jdbi | jdbi | https://github.com/jdbi/jdbi/issues/2197 | https://github.com/jdbi/jdbi/pull/2198 | https://github.com/jdbi/jdbi/pull/2198 | 1 | fixes | JdbiRule fails to clean on flyway 9+ due to cleanDisabled defaulting to true | `JdbiRule` needs a fix equivalent to the one in https://github.com/jdbi/jdbi/pull/2179 | da9ea8f3f682f46ed60c6a8bfbbc392ec9115371 | 4ce05f4622dade4a7bfbcdf5b557d454292d2d09 | https://github.com/jdbi/jdbi/compare/da9ea8f3f682f46ed60c6a8bfbbc392ec9115371...4ce05f4622dade4a7bfbcdf5b557d454292d2d09 | diff --git a/testing/src/main/java/org/jdbi/v3/testing/JdbiRule.java b/testing/src/main/java/org/jdbi/v3/testing/JdbiRule.java
index 8fcb22b57..cdd04c0ba 100644
--- a/testing/src/main/java/org/jdbi/v3/testing/JdbiRule.java
+++ b/testing/src/main/java/org/jdbi/v3/testing/JdbiRule.java
@@ -154,6 +154,7 @@ public abstract class JdbiRule extends ExternalResource {
.dataSource(getDataSource())
.locations(migration.paths.toArray(new String[0]))
.schemas(migration.schemas.toArray(new String[0]))
+ .cleanDisabled(!migration.cleanAfter)
.load();
flyway.migrate();
}
diff --git a/testing/src/main/java/org/jdbi/v3/testing/Migration.java b/testing/src/main/java/org/jdbi/v3/testing/Migration.java
index 18268c592..32a57b4ea 100644
--- a/testing/src/main/java/org/jdbi/v3/testing/Migration.java
+++ b/testing/src/main/java/org/jdbi/v3/testing/Migration.java
@@ -75,6 +75,15 @@ public class Migration {
return this;
}
+ /**
+ * Controls whether to drop all objects in the configured schemas after running the tests using Flyway.
+ */
+ @SuppressWarnings("HiddenField")
+ public Migration cleanAfter(boolean cleanAfter) {
+ this.cleanAfter = cleanAfter;
+ return this;
+ }
+
/**
* Create new Migration.
*/ | ['testing/src/main/java/org/jdbi/v3/testing/JdbiRule.java', 'testing/src/main/java/org/jdbi/v3/testing/Migration.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,755,053 | 372,212 | 48,415 | 626 | 343 | 65 | 10 | 2 | 86 | 10 | 29 | 1 | 1 | 0 | 2022-11-18T18:47:59 | 1,804 | Java | {'Java': 3750631, 'Kotlin': 121092, 'HTML': 33996, 'FreeMarker': 5773, 'ANTLR': 5398, 'Makefile': 3525, 'PLSQL': 1420, 'Shell': 981} | Apache License 2.0 |
530 | spring-projects/spring-session/450/445 | spring-projects | spring-session | https://github.com/spring-projects/spring-session/issues/445 | https://github.com/spring-projects/spring-session/pull/450 | https://github.com/spring-projects/spring-session/pull/450 | 2 | fixes | JdbcOperationsSessionRepository ignores lastAccessTime | Session always expires after maxInactiveInterval since creation, because lastAccessTime is not loaded from database column.
Session inactivity is checked using the de-serialized object, which is **not** updated by UPDATE_SESSION_LAST_ACCESS_TIME_QUERY (the query executed when there are no changes to session attributes).
The session is then deleted by the session.isExpired() check in JdbcOperationsSessionRepository.getSession(String id).
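A stripped-down sketch of the repair (the real repository deserializes through a ConversionService and LobHandler; plain Java serialization is used here only to keep the sketch self-contained): after materializing the session, the mapper has to push the LAST_ACCESS_TIME column onto it, otherwise isExpired() is evaluated against the creation-time value.
```java
import java.io.ByteArrayInputStream;
import java.io.ObjectInputStream;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.springframework.jdbc.core.RowMapper;
import org.springframework.session.ExpiringSession;

class ExpiringSessionRowMapperSketch implements RowMapper<ExpiringSession> {
    @Override
    public ExpiringSession mapRow(ResultSet rs, int rowNum) throws SQLException {
        try (ObjectInputStream in = new ObjectInputStream(
                new ByteArrayInputStream(rs.getBytes("SESSION_BYTES")))) {
            ExpiringSession session = (ExpiringSession) in.readObject();
            // The missing piece: overwrite the serialized value with the column
            // that UPDATE_SESSION_LAST_ACCESS_TIME_QUERY keeps current.
            session.setLastAccessedTime(rs.getLong("LAST_ACCESS_TIME"));
            return session;
        } catch (Exception e) {
            throw new SQLException("Could not deserialize session", e);
        }
    }
}
```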
| bd2d84691794b8c23f081cec998289b67c99b037 | 63006db45d4a5d29534078b04c78640c3181e179 | https://github.com/spring-projects/spring-session/compare/bd2d84691794b8c23f081cec998289b67c99b037...63006db45d4a5d29534078b04c78640c3181e179 | diff --git a/spring-session/src/integration-test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryITests.java b/spring-session/src/integration-test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryITests.java
index 0c0b7221..07588690 100644
--- a/spring-session/src/integration-test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryITests.java
+++ b/spring-session/src/integration-test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryITests.java
@@ -37,6 +37,7 @@ import org.springframework.security.core.Authentication;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.session.ExpiringSession;
import org.springframework.session.FindByIndexNameSessionRepository;
import org.springframework.session.MapSession;
import org.springframework.session.Session;
@@ -135,6 +136,26 @@ public class JdbcOperationsSessionRepositoryITests {
this.repository.delete(toSave.getId());
}
+ @Test
+ public void updateLastAccessedTime() {
+ JdbcOperationsSessionRepository.JdbcSession toSave = this.repository
+ .createSession();
+ toSave.setLastAccessedTime(System.currentTimeMillis()
+ - (MapSession.DEFAULT_MAX_INACTIVE_INTERVAL_SECONDS * 1000 + 1000));
+
+ this.repository.save(toSave);
+
+ long lastAccessedTime = System.currentTimeMillis();
+ toSave.setLastAccessedTime(lastAccessedTime);
+ this.repository.save(toSave);
+
+ ExpiringSession session = this.repository.getSession(toSave.getId());
+
+ assertThat(session).isNotNull();
+ assertThat(session.isExpired()).isFalse();
+ assertThat(session.getLastAccessedTime()).isEqualTo(lastAccessedTime);
+ }
+
@Test
public void findByPrincipalName() throws Exception {
String principalName = "findByPrincipalName" + UUID.randomUUID();
diff --git a/spring-session/src/main/java/org/springframework/session/jdbc/JdbcOperationsSessionRepository.java b/spring-session/src/main/java/org/springframework/session/jdbc/JdbcOperationsSessionRepository.java
index cd7b3866..a515cdbf 100644
--- a/spring-session/src/main/java/org/springframework/session/jdbc/JdbcOperationsSessionRepository.java
+++ b/spring-session/src/main/java/org/springframework/session/jdbc/JdbcOperationsSessionRepository.java
@@ -102,7 +102,7 @@ public class JdbcOperationsSessionRepository implements
private static final String CREATE_SESSION_QUERY = "INSERT INTO %TABLE_NAME%(SESSION_ID, LAST_ACCESS_TIME, PRINCIPAL_NAME, SESSION_BYTES) VALUES (?, ?, ?, ?)";
- private static final String GET_SESSION_QUERY = "SELECT SESSION_BYTES FROM %TABLE_NAME% WHERE SESSION_ID = ?";
+ private static final String GET_SESSION_QUERY = "SELECT LAST_ACCESS_TIME, SESSION_BYTES FROM %TABLE_NAME% WHERE SESSION_ID = ?";
private static final String UPDATE_SESSION_QUERY = "UPDATE %TABLE_NAME% SET LAST_ACCESS_TIME = ?, PRINCIPAL_NAME = ?, SESSION_BYTES = ? WHERE SESSION_ID = ?";
@@ -110,7 +110,7 @@ public class JdbcOperationsSessionRepository implements
private static final String DELETE_SESSION_QUERY = "DELETE FROM %TABLE_NAME% WHERE SESSION_ID = ?";
- private static final String LIST_SESSIONS_BY_PRINCIPAL_NAME_QUERY = "SELECT SESSION_BYTES FROM %TABLE_NAME% WHERE PRINCIPAL_NAME = ?";
+ private static final String LIST_SESSIONS_BY_PRINCIPAL_NAME_QUERY = "SELECT LAST_ACCESS_TIME, SESSION_BYTES FROM %TABLE_NAME% WHERE PRINCIPAL_NAME = ?";
private static final String DELETE_SESSIONS_BY_LAST_ACCESS_TIME_QUERY = "DELETE FROM %TABLE_NAME% WHERE LAST_ACCESS_TIME < ?";
@@ -473,12 +473,14 @@ public class JdbcOperationsSessionRepository implements
private class ExpiringSessionMapper implements RowMapper<ExpiringSession> {
public ExpiringSession mapRow(ResultSet rs, int rowNum) throws SQLException {
- return (ExpiringSession) JdbcOperationsSessionRepository.this.conversionService
- .convert(
+ ExpiringSession session = (ExpiringSession) JdbcOperationsSessionRepository
+ .this.conversionService.convert(
JdbcOperationsSessionRepository.this.lobHandler
.getBlobAsBytes(rs, "SESSION_BYTES"),
TypeDescriptor.valueOf(byte[].class),
TypeDescriptor.valueOf(ExpiringSession.class));
+ session.setLastAccessedTime(rs.getLong("LAST_ACCESS_TIME"));
+ return session;
}
}
diff --git a/spring-session/src/test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryTests.java b/spring-session/src/test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryTests.java
index d4714b01..b2e5da1f 100644
--- a/spring-session/src/test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryTests.java
+++ b/spring-session/src/test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryTests.java
@@ -212,7 +212,6 @@ public class JdbcOperationsSessionRepositoryTests {
}
@Test
- @SuppressWarnings("unchecked")
public void getSessionNotFound() {
String sessionId = "testSessionId";
@@ -225,10 +224,10 @@ public class JdbcOperationsSessionRepositoryTests {
}
@Test
- @SuppressWarnings("unchecked")
public void getSessionExpired() {
MapSession expired = new MapSession();
- expired.setMaxInactiveIntervalInSeconds(0);
+ expired.setLastAccessedTime(System.currentTimeMillis() -
+ (MapSession.DEFAULT_MAX_INACTIVE_INTERVAL_SECONDS * 1000 + 1000));
given(this.jdbcOperations.queryForObject(startsWith("SELECT"),
eq(new Object[] { expired.getId() }), isA(RowMapper.class)))
.willReturn(expired);
@@ -244,7 +243,6 @@ public class JdbcOperationsSessionRepositoryTests {
}
@Test
- @SuppressWarnings("unchecked")
public void getSessionFound() {
MapSession saved = new MapSession();
saved.setAttribute("savedName", "savedValue");
@@ -283,7 +281,6 @@ public class JdbcOperationsSessionRepositoryTests {
}
@Test
- @SuppressWarnings("unchecked")
public void findByIndexNameAndIndexValuePrincipalIndexNameNotFound() {
String principal = "username";
@@ -298,7 +295,6 @@ public class JdbcOperationsSessionRepositoryTests {
}
@Test
- @SuppressWarnings("unchecked")
public void findByIndexNameAndIndexValuePrincipalIndexNameFound() {
String principal = "username";
Authentication authentication = new UsernamePasswordAuthenticationToken(principal, | ['spring-session/src/test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryTests.java', 'spring-session/src/integration-test/java/org/springframework/session/jdbc/JdbcOperationsSessionRepositoryITests.java', 'spring-session/src/main/java/org/springframework/session/jdbc/JdbcOperationsSessionRepository.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 471,995 | 101,064 | 14,301 | 146 | 839 | 185 | 10 | 1 | 420 | 47 | 75 | 6 | 0 | 0 | 2016-03-28T15:18:40 | 1,796 | Java | {'Java': 1667930, 'Groovy': 56498, 'HTML': 36763, 'JavaScript': 12662, 'Ruby': 7007, 'Shell': 213} | Apache License 2.0 |
1,362 | synthetichealth/synthea/1147/1146 | synthetichealth | synthea | https://github.com/synthetichealth/synthea/issues/1146 | https://github.com/synthetichealth/synthea/pull/1147 | https://github.com/synthetichealth/synthea/pull/1147 | 1 | fixes | Missing identifier in patient home's location resource | ### What happened?
When pushing generated patients to a FHIR server (after the locations have already been pushed), the patient's home location is referenced by its identifier even though the initially pushed patient's home doesn't have an identifier set. The request is therefore denied.
Here is the patient's home location as part of the generated location resource bundle:
```
{
"fullUrl": "urn:uuid:bb1ad573-19b8-9cd8-68fb-0e6f684df992",
"resource": {
"resourceType": "Location",
"id": "bb1ad573-19b8-9cd8-68fb-0e6f684df992",
"status": "active",
"description": "Patient's Home",
"mode": "kind",
"physicalType": {
"coding": [ {
"system": "http://terminology.hl7.org/CodeSystem/location-physical-type",
"code": "ho",
"display": "House"
} ]
}
},
"request": {
"method": "POST",
"url": "Location"
}
}
```
Here is an Encounter. Notice the location reference.
```
{
"fullUrl": "urn:uuid:48cd26ad-03d0-d1ed-03ed-38c534490e4e",
"resource": {
"resourceType": "Encounter",
"id": "48cd26ad-03d0-d1ed-03ed-38c534490e4e",
"meta": {
"profile": [ "http://hl7.org/fhir/us/core/StructureDefinition/us-core-encounter" ]
},
"identifier": [ {
"use": "official",
"system": "https://github.com/synthetichealth/synthea",
"value": "48cd26ad-03d0-d1ed-03ed-38c534490e4e"
} ],
"status": "finished",
"class": {
"system": "http://terminology.hl7.org/CodeSystem/v3-ActCode",
"code": "VR"
},
"type": [ {
"coding": [ {
"system": "http://snomed.info/sct",
"code": "390906007",
"display": "Hypertension follow-up encounter"
} ],
"text": "Hypertension follow-up encounter"
} ],
"subject": {
"reference": "urn:uuid:5223c440-2e86-8470-fcde-69f56234897a",
"display": "Mrs. Lorie307 Barabara924 Barrows492"
},
"participant": [ {
"type": [ {
"coding": [ {
"system": "http://terminology.hl7.org/CodeSystem/v3-ParticipationType",
"code": "PPRF",
"display": "primary performer"
} ],
"text": "primary performer"
} ],
"period": {
"start": "2021-02-21T23:04:44+00:00",
"end": "2021-02-21T23:19:44+00:00"
},
"individual": {
"reference": "Practitioner?identifier=http://hl7.org/fhir/sid/us-npi|9999995795",
"display": "Dr. Harland508 Ryan260"
}
} ],
"period": {
"start": "2021-02-21T23:04:44+00:00",
"end": "2021-02-21T23:19:44+00:00"
},
"reasonCode": [ {
"coding": [ {
"system": "http://snomed.info/sct",
"code": "59621000",
"display": "Hypertension"
} ]
} ],
"location": [ {
"location": {
"reference": "Location?identifier=https://github.com/synthetichealth/synthea|bb1ad573-19b8-9cd8-68fb-0e6f684df992",
"display": "Patient's Home"
}
} ],
"serviceProvider": {
"reference": "Organization?identifier=https://github.com/synthetichealth/synthea|3d10019f-c88e-3de5-9916-6107b9c0263d",
"display": "NEWTON-WELLESLEY HOSPITAL"
}
},
"request": {
"method": "POST",
"url": "Encounter"
}
}
```
Since other locations have an identifier set I assume that the identifier needs to be added here: https://github.com/synthetichealth/synthea/blob/cd8062ebce51203f64cc48dfcb78c27c56166272/src/main/java/org/mitre/synthea/export/FhirR4PatientHome.java#L24
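A sketch of what that addition could look like with the HAPI R4 model classes this exporter already uses; the system URI is copied from the `Location?identifier=...` reference in the Encounter above rather than from any documented requirement:
```java
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.Location;

public class PatientHomeIdentifierSketch {
    static void tagWithSyntheaIdentifier(Location patientHome) {
        // Give the patient's-home Location the same style of business identifier
        // the other Synthea resources carry, so conditional references resolve.
        Identifier identifier = patientHome.addIdentifier();
        identifier.setSystem("https://github.com/synthetichealth/synthea");
        identifier.setValue(patientHome.getId());
    }
}
```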
### Environment
```markdown
- OS: Ubuntu 22.04
- Java: 11
```
### Relevant log output
_No response_ | cd8062ebce51203f64cc48dfcb78c27c56166272 | a5eb51c7a0bce59163d732de717dedfc9be3ae0b | https://github.com/synthetichealth/synthea/compare/cd8062ebce51203f64cc48dfcb78c27c56166272...a5eb51c7a0bce59163d732de717dedfc9be3ae0b | diff --git a/src/main/java/org/mitre/synthea/export/FhirR4.java b/src/main/java/org/mitre/synthea/export/FhirR4.java
index e7bad1df2..b83a33f0b 100644
--- a/src/main/java/org/mitre/synthea/export/FhirR4.java
+++ b/src/main/java/org/mitre/synthea/export/FhirR4.java
@@ -170,7 +170,7 @@ public class FhirR4 {
private static final String UNITSOFMEASURE_URI = "http://unitsofmeasure.org";
private static final String DICOM_DCM_URI = "http://dicom.nema.org/resources/ontology/DCM";
private static final String MEDIA_TYPE_URI = "http://terminology.hl7.org/CodeSystem/media-type";
- private static final String SYNTHEA_IDENTIFIER = "https://github.com/synthetichealth/synthea";
+ protected static final String SYNTHEA_IDENTIFIER = "https://github.com/synthetichealth/synthea";
@SuppressWarnings("rawtypes")
private static final Map raceEthnicityCodes = loadRaceEthnicityCodes();
diff --git a/src/main/java/org/mitre/synthea/export/FhirR4PatientHome.java b/src/main/java/org/mitre/synthea/export/FhirR4PatientHome.java
index 25be0a7cd..bcfa64e31 100644
--- a/src/main/java/org/mitre/synthea/export/FhirR4PatientHome.java
+++ b/src/main/java/org/mitre/synthea/export/FhirR4PatientHome.java
@@ -2,6 +2,7 @@ package org.mitre.synthea.export;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
+import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.Location;
import org.hl7.fhir.r4.model.codesystems.LocationPhysicalType;
import org.mitre.synthea.world.agents.Person;
@@ -34,6 +35,9 @@ public class FhirR4PatientHome {
// Not really generating a random UUID. Given that this is not tied to a particular provider
// or person, this just makes up a person with a hardcoded random seed.
patientHome.setId(new Person(1).randUUID().toString());
+ Identifier identifier = patientHome.addIdentifier();
+ identifier.setSystem(FhirR4.SYNTHEA_IDENTIFIER);
+ identifier.setValue(patientHome.getId());
}
return patientHome;
} | ['src/main/java/org/mitre/synthea/export/FhirR4PatientHome.java', 'src/main/java/org/mitre/synthea/export/FhirR4.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,873,423 | 439,554 | 49,825 | 132 | 403 | 91 | 6 | 2 | 3,890 | 306 | 1,142 | 121 | 11 | 3 | 2022-09-14T12:52:34 | 1,790 | Java | {'Java': 2754203, 'FreeMarker': 46623, 'JavaScript': 7344, 'Batchfile': 653, 'Shell': 573} | Apache License 2.0 |
1,360 | synthetichealth/synthea/1299/1296 | synthetichealth | synthea | https://github.com/synthetichealth/synthea/issues/1296 | https://github.com/synthetichealth/synthea/pull/1299 | https://github.com/synthetichealth/synthea/pull/1299 | 1 | fixes | Running synthea via .jar release with config "physiology.state.enabled=true" throws java.io.FileNotFoundException | ### What happened?
Downloading `synthea-with-dependencies.jar` and running `java -jar ./synthea-with-dependencies.jar --physiology.state.enabled true` throws `java.io.FileNotFoundException: /physiology/models/circulation/McSharry2003_Synthetic_ECG.xml`
Cloning the repository and running `./run_synthea --physiology.state.enabled=true` works.
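A plausible explanation and workaround sketch (this is the direction the accompanying patch takes): inside the shaded jar the SBML models are classpath resources rather than files, so resolving the resource URL to a `java.io.File` path cannot work; reading the resource into a string and handing it to JSBML avoids the filesystem entirely. Only the `readSBMLFromString` call is taken from the patch — the rest of the helper is hypothetical.
```java
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.sbml.jsbml.SBMLDocument;
import org.sbml.jsbml.SBMLReader;

public class ClasspathSbmlLoader {
    static SBMLDocument load(String modelPath) throws Exception {
        String resource = "physiology/models/" + modelPath;
        try (InputStream in = ClasspathSbmlLoader.class.getClassLoader()
                .getResourceAsStream(resource)) {
            if (in == null) {
                throw new IllegalArgumentException("No such resource: " + resource);
            }
            String sbml = new String(in.readAllBytes(), StandardCharsets.UTF_8);
            // Parse from the string instead of a File so it works from inside a jar.
            return new SBMLReader().readSBMLFromString(sbml);
        }
    }
}
```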
### Environment
```markdown
- OS: macOS 13.3.1
- Java: openjdk 17.0.7
```
### Relevant log output
```shell
Loading module modules/epilepsy.json
Loading module modules/female_reproduction.json
Loading module modules/fibromyalgia.json
Loading module modules/food_allergies.json
Loading module modules/gallstones.json
WARNING: sun.reflect.Reflection.getCallerClass is not supported. This will impact performance.
java.lang.RuntimeException: java.io.FileNotFoundException: /physiology/models/circulation/McSharry2003_Synthetic_ECG.xml (No such file or directory)
at org.mitre.synthea.engine.PhysiologySimulator.<init>(PhysiologySimulator.java:202)
at org.mitre.synthea.engine.State$Physiology.setup(State.java:350)
at org.mitre.synthea.engine.State$Physiology.initialize(State.java:345)
at org.mitre.synthea.engine.State.build(State.java:123)
at org.mitre.synthea.engine.Module.<init>(Module.java:327)
at org.mitre.synthea.engine.Module.loadFile(Module.java:218)
at org.mitre.synthea.engine.Module.lambda$walkModuleTree$0(Module.java:141)
at org.mitre.synthea.engine.Module$ModuleSupplier.get(Module.java:507)
at org.mitre.synthea.engine.Module.lambda$getModules$4(Module.java:261)
at java.base/java.util.TreeMap.forEach(TreeMap.java:1282)
at org.mitre.synthea.engine.Module.getModules(Module.java:257)
at org.mitre.synthea.engine.Generator.init(Generator.java:276)
at org.mitre.synthea.engine.Generator.<init>(Generator.java:211)
at org.mitre.synthea.engine.Generator.<init>(Generator.java:187)
at App.main(App.java:233)
Caused by: java.io.FileNotFoundException: /physiology/models/circulation/McSharry2003_Synthetic_ECG.xml (No such file or directory)
at java.base/java.io.FileInputStream.open0(Native Method)
at java.base/java.io.FileInputStream.open(FileInputStream.java:216)
at java.base/java.io.FileInputStream.<init>(FileInputStream.java:157)
at org.sbml.jsbml.xml.stax.SBMLReader.readSBML(SBMLReader.java:302)
at org.sbml.jsbml.xml.stax.SBMLReader.readSBML(SBMLReader.java:288)
at org.mitre.synthea.engine.PhysiologySimulator.<init>(PhysiologySimulator.java:200)
... 14 more
Exception in thread "main" java.lang.RuntimeException: java.lang.RuntimeException: java.io.FileNotFoundException: /physiology/models/circulation/McSharry2003_Synthetic_ECG.xml (No such file or directory)
at org.mitre.synthea.engine.Module$ModuleSupplier.get(Module.java:517)
at org.mitre.synthea.engine.Module.lambda$getModules$4(Module.java:261)
at java.base/java.util.TreeMap.forEach(TreeMap.java:1282)
at org.mitre.synthea.engine.Module.getModules(Module.java:257)
at org.mitre.synthea.engine.Generator.init(Generator.java:276)
at org.mitre.synthea.engine.Generator.<init>(Generator.java:211)
at org.mitre.synthea.engine.Generator.<init>(Generator.java:187)
at App.main(App.java:233)
Caused by: java.lang.RuntimeException: java.io.FileNotFoundException: /physiology/models/circulation/McSharry2003_Synthetic_ECG.xml (No such file or directory)
at org.mitre.synthea.engine.PhysiologySimulator.<init>(PhysiologySimulator.java:202)
at org.mitre.synthea.engine.State$Physiology.setup(State.java:350)
at org.mitre.synthea.engine.State$Physiology.initialize(State.java:345)
at org.mitre.synthea.engine.State.build(State.java:123)
at org.mitre.synthea.engine.Module.<init>(Module.java:327)
at org.mitre.synthea.engine.Module.loadFile(Module.java:218)
at org.mitre.synthea.engine.Module.lambda$walkModuleTree$0(Module.java:141)
at org.mitre.synthea.engine.Module$ModuleSupplier.get(Module.java:507)
... 7 more
Caused by: java.io.FileNotFoundException: /physiology/models/circulation/McSharry2003_Synthetic_ECG.xml (No such file or directory)
at java.base/java.io.FileInputStream.open0(Native Method)
at java.base/java.io.FileInputStream.open(FileInputStream.java:216)
at java.base/java.io.FileInputStream.<init>(FileInputStream.java:157)
at org.sbml.jsbml.xml.stax.SBMLReader.readSBML(SBMLReader.java:302)
at org.sbml.jsbml.xml.stax.SBMLReader.readSBML(SBMLReader.java:288)
at org.mitre.synthea.engine.PhysiologySimulator.<init>(PhysiologySimulator.java:200)
... 14 more
```
| 9ad8bbb725d329a1c6492e9f7a042d221dca0a78 | 65e4c86d4b5ca5d66ee212c8e9f82da70ebfdb59 | https://github.com/synthetichealth/synthea/compare/9ad8bbb725d329a1c6492e9f7a042d221dca0a78...65e4c86d4b5ca5d66ee212c8e9f82da70ebfdb59 | diff --git a/src/main/java/org/mitre/synthea/engine/PhysiologySimulator.java b/src/main/java/org/mitre/synthea/engine/PhysiologySimulator.java
index 15e0aa79f..1e59455c5 100755
--- a/src/main/java/org/mitre/synthea/engine/PhysiologySimulator.java
+++ b/src/main/java/org/mitre/synthea/engine/PhysiologySimulator.java
@@ -7,8 +7,6 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
-import java.net.URISyntaxException;
-import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -25,6 +23,7 @@ import org.apache.commons.math.ode.DerivativeException;
import org.mitre.synthea.helpers.ChartRenderer;
import org.mitre.synthea.helpers.ChartRenderer.MultiTableChartConfig;
import org.mitre.synthea.helpers.ChartRenderer.MultiTableSeriesConfig;
+import org.mitre.synthea.helpers.Utilities;
import org.sbml.jsbml.Model;
import org.sbml.jsbml.SBMLDocument;
import org.sbml.jsbml.SBMLException;
@@ -51,11 +50,8 @@ import org.yaml.snakeyaml.constructor.Constructor;
*/
public class PhysiologySimulator {
- private static final URL MODELS_RESOURCE = ClassLoader.getSystemClassLoader()
- .getResource("physiology/models");
private static final Map<String, Class<?>> SOLVER_CLASSES;
private static Map<String, Model> MODEL_CACHE;
- private static Path SBML_PATH;
private static Path OUTPUT_PATH = Paths.get("output", "physiology");
private final Model model;
@@ -151,24 +147,10 @@ public class PhysiologySimulator {
// Make unmodifiable so it doesn't change after initialization
SOLVER_CLASSES = Collections.unmodifiableMap(initSolvers);
- try {
- SBML_PATH = Paths.get(MODELS_RESOURCE.toURI());
- } catch (URISyntaxException ex) {
- throw new RuntimeException(ex);
- }
-
// Initialize our model cache
MODEL_CACHE = new HashMap<String, Model>();
}
- /**
- * Sets the path to search for SBML model files.
- * @param newPath new path to use
- */
- public static void setModelsPath(Path newPath) {
- SBML_PATH = newPath;
- }
-
/**
* Sets the path to place main simulation results in.
* @param newPath new path to use
@@ -192,12 +174,11 @@ public class PhysiologySimulator {
model = MODEL_CACHE.get(modelPath);
} else {
// Load and instantiate the model from the SBML file
- Path modelFilepath = Paths.get(SBML_PATH.toString(), modelPath);
SBMLReader reader = new SBMLReader();
- File inputFile = new File(modelFilepath.toString());
SBMLDocument doc;
try {
- doc = reader.readSBML(inputFile);
+ String sbmlContent = Utilities.readResourceOrPath("physiology/models/" + modelPath);
+ doc = reader.readSBMLFromString(sbmlContent);
} catch (IOException | XMLStreamException ex) {
throw new RuntimeException(ex);
} | ['src/main/java/org/mitre/synthea/engine/PhysiologySimulator.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 2,050,736 | 482,253 | 54,343 | 177 | 925 | 204 | 25 | 1 | 4,828 | 211 | 1,200 | 77 | 0 | 2 | 2023-05-25T15:38:49 | 1,790 | Java | {'Java': 2754203, 'FreeMarker': 46623, 'JavaScript': 7344, 'Batchfile': 653, 'Shell': 573} | Apache License 2.0 |
1,361 | synthetichealth/synthea/1161/1160 | synthetichealth | synthea | https://github.com/synthetichealth/synthea/issues/1160 | https://github.com/synthetichealth/synthea/pull/1161 | https://github.com/synthetichealth/synthea/pull/1161 | 1 | fixes | manifest.xml timestamp sporadically drops seconds from the timestamp text | ### What happened?
On several occasions, I have seen the manifest TS drop the ss (seconds) portion of the timestamp; an example:
```xml
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<dataSetManifest xmlns="http://cms.hhs.gov/bluebutton/api/schema/ccw-rif/v9" timestamp="2022-09-26T18:45Z" sequenceId="0" syntheticData="true">
<entry name="beneficiary_2012.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2013.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2014.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2015.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2016.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2017.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2018.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2019.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2020.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2021.csv" type="BENEFICIARY"/>
<entry name="beneficiary_2022.csv" type="BENEFICIARY"/>
<entry name="beneficiary_history.csv" type="BENEFICIARY_HISTORY"/>
<entry name="carrier.csv" type="CARRIER"/>
<entry name="dme.csv" type="DME"/>
<entry name="hha.csv" type="HHA"/>
<entry name="hospice.csv" type="HOSPICE"/>
<entry name="inpatient.csv" type="INPATIENT"/>
<entry name="outpatient.csv" type="OUTPATIENT"/>
<entry name="pde.csv" type="PDE"/>
<entry name="snf.csv" type="SNF"/>
</dataSetManifest>
```
This affects the ETL load as the S3 bucket/folders are inconsistently named. The code in question is:
BB2RIFExporter.java:
```java
manifest.write(String.format(" timestamp=\\"%s\\" ",
java.time.Instant.now()
.atZone(java.time.ZoneId.of("Z"))
.truncatedTo(java.time.temporal.ChronoUnit.SECONDS)
.toString()));
```
This happens sporadically; I am guessing that maybe the seconds just happened to be zero and they got dropped in the toString()?
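The guess is right: `ZonedDateTime.toString()` follows ISO-8601 and omits the seconds field whenever it is exactly zero. A small self-contained demonstration (the explicit pattern in the second half mirrors the fix below):
```java
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;

public class ManifestTimestampDemo {
    public static void main(String[] args) {
        Instant onTheMinute = Instant.parse("2022-09-26T18:45:00Z");

        // toString() drops ":00" -> "2022-09-26T18:45Z", which is what showed up
        // in the manifest whenever the export landed on a whole minute.
        System.out.println(onTheMinute.atZone(ZoneId.of("Z"))
                .truncatedTo(ChronoUnit.SECONDS));

        // An explicit pattern always writes the seconds -> "2022-09-26T18:45:00Z".
        DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssX");
        System.out.println(onTheMinute.atZone(ZoneId.of("Z"))
                .truncatedTo(ChronoUnit.SECONDS).format(fmt));
    }
}
```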
### Environment
```markdown
macOS 12.6
OpenJDK Runtime Environment Temurin-11.0.13+8 (build 11.0.13+8)
OpenJDK 64-Bit Server VM Temurin-11.0.13+8 (build 11.0.13+8, mixed mode)
```
### Relevant log output
_No response_ | 32478f4fd48a355fb35434e8958aa25f30cb4443 | 5882bd6603f8febafabfcfc9f8b7f27721dde780 | https://github.com/synthetichealth/synthea/compare/32478f4fd48a355fb35434e8958aa25f30cb4443...5882bd6603f8febafabfcfc9f8b7f27721dde780 | diff --git a/src/main/java/org/mitre/synthea/export/BB2RIFExporter.java b/src/main/java/org/mitre/synthea/export/BB2RIFExporter.java
index 82efe4448..85884a2e7 100644
--- a/src/main/java/org/mitre/synthea/export/BB2RIFExporter.java
+++ b/src/main/java/org/mitre/synthea/export/BB2RIFExporter.java
@@ -21,6 +21,7 @@ import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
+import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -294,6 +295,9 @@ public class BB2RIFExporter {
rifWriters = new RifWriters(outputDirectory);
}
+ private static DateTimeFormatter MANIFEST_TIMESTAMP_FORMAT
+ = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssX");
+
/**
* Export a manifest file that lists all of the other BFD files (except the NPI file
* which is special).
@@ -309,7 +313,7 @@ public class BB2RIFExporter {
java.time.Instant.now()
.atZone(java.time.ZoneId.of("Z"))
.truncatedTo(java.time.temporal.ChronoUnit.SECONDS)
- .toString()));
+ .format(MANIFEST_TIMESTAMP_FORMAT)));
manifest.write("sequenceId=\\"0\\" syntheticData=\\"true\\">\\n");
for (Class<?> rifFile: BB2RIFStructure.RIF_FILES) {
for (int year: rifWriters.getYears()) { | ['src/main/java/org/mitre/synthea/export/BB2RIFExporter.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,924,648 | 450,735 | 51,136 | 142 | 272 | 48 | 6 | 1 | 2,188 | 169 | 651 | 53 | 1 | 1 | 2022-09-26T22:05:36 | 1,790 | Java | {'Java': 2754203, 'FreeMarker': 46623, 'JavaScript': 7344, 'Batchfile': 653, 'Shell': 573} | Apache License 2.0 |
1,365 | synthetichealth/synthea/410/395 | synthetichealth | synthea | https://github.com/synthetichealth/synthea/issues/395 | https://github.com/synthetichealth/synthea/pull/410 | https://github.com/synthetichealth/synthea/pull/410 | 1 | fixes | Latitude and Longitude are reversed | The latitude and longitude of patient coordinates appear to be reversed. It's consistent though as patients are correctly assigned to the geographically closest provider, not some random corner of the state.
From a sample FHIR record for a patient in MA:
```
"address": [
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/geolocation",
"extension": [
{
"url": "latitude",
"valueDecimal": -71.026717
},
{
"url": "longitude",
"valueDecimal": 42.021617
}
]
}
],
```
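For orientation: a Massachusetts address should have latitude around 42 and longitude around −71, so the two values above are swapped. A minimal sketch of the ordering the accompanying patch settles on when building the coordinate (assuming the GeoTools `DirectPosition2D` class this codebase already uses, whose constructor takes x then y — i.e. longitude before latitude):
```java
import org.geotools.geometry.DirectPosition2D;

public class CoordinateOrderSketch {
    static DirectPosition2D massachusettsPoint() {
        double lat = 42.021617;   // north-south, ~42 degrees for MA
        double lon = -71.026717;  // east-west, ~-71 degrees for MA
        // x = longitude, y = latitude -- the argument order used in the fix below
        return new DirectPosition2D(lon, lat);
    }
}
```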
| 87661476ec2da9829584cca394506fef4c4e5ef7 | 9d9e276ab393003352432576df648a3b297407a7 | https://github.com/synthetichealth/synthea/compare/87661476ec2da9829584cca394506fef4c4e5ef7...9d9e276ab393003352432576df648a3b297407a7 | diff --git a/src/main/java/org/mitre/synthea/export/FhirDstu2.java b/src/main/java/org/mitre/synthea/export/FhirDstu2.java
index 036e09c05..37adfd8c1 100644
--- a/src/main/java/org/mitre/synthea/export/FhirDstu2.java
+++ b/src/main/java/org/mitre/synthea/export/FhirDstu2.java
@@ -388,7 +388,7 @@ public class FhirDstu2 {
addrResource.setCountry(COUNTRY_CODE);
}
- DirectPosition2D coord = (DirectPosition2D) person.attributes.get(Person.COORDINATE);
+ DirectPosition2D coord = person.getLatLon();
if (coord != null) {
ExtensionDt geolocationExtension = new ExtensionDt();
geolocationExtension.setUrl("http://hl7.org/fhir/StructureDefinition/geolocation");
diff --git a/src/main/java/org/mitre/synthea/export/FhirStu3.java b/src/main/java/org/mitre/synthea/export/FhirStu3.java
index a9f844660..c49d64571 100644
--- a/src/main/java/org/mitre/synthea/export/FhirStu3.java
+++ b/src/main/java/org/mitre/synthea/export/FhirStu3.java
@@ -483,7 +483,7 @@ public class FhirStu3 {
mapCodeToCodeableConcept(maritalStatusCode, "http://hl7.org/fhir/v3/MaritalStatus"));
}
- DirectPosition2D coord = (DirectPosition2D) person.attributes.get(Person.COORDINATE);
+ DirectPosition2D coord = person.getLatLon();
if (coord != null) {
Extension geolocation = addrResource.addExtension();
geolocation.setUrl("http://hl7.org/fhir/StructureDefinition/geolocation");
diff --git a/src/main/java/org/mitre/synthea/world/agents/Provider.java b/src/main/java/org/mitre/synthea/world/agents/Provider.java
index db27a6194..293c6bbec 100644
--- a/src/main/java/org/mitre/synthea/world/agents/Provider.java
+++ b/src/main/java/org/mitre/synthea/world/agents/Provider.java
@@ -272,7 +272,7 @@ public class Provider implements QuadTreeData {
}
double lat = Double.parseDouble(line.remove("LAT"));
double lon = Double.parseDouble(line.remove("LON"));
- d.coordinates = new DirectPosition2D(lat, lon);
+ d.coordinates = new DirectPosition2D(lon, lat);
return d;
}
diff --git a/src/main/java/org/mitre/synthea/world/geography/Place.java b/src/main/java/org/mitre/synthea/world/geography/Place.java
index c6afdff1f..e270212bd 100644
--- a/src/main/java/org/mitre/synthea/world/geography/Place.java
+++ b/src/main/java/org/mitre/synthea/world/geography/Place.java
@@ -31,7 +31,7 @@ public class Place implements QuadTreeData {
this.postalCode = row.get("ZCTA5");
double lat = Double.parseDouble(row.get("LAT"));
double lon = Double.parseDouble(row.get("LON"));
- this.coordinate = new DirectPosition2D(lat, lon);
+ this.coordinate = new DirectPosition2D(lon, lat);
}
/** | ['src/main/java/org/mitre/synthea/world/agents/Provider.java', 'src/main/java/org/mitre/synthea/world/geography/Place.java', 'src/main/java/org/mitre/synthea/export/FhirStu3.java', 'src/main/java/org/mitre/synthea/export/FhirDstu2.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 640,583 | 146,622 | 17,253 | 49 | 494 | 120 | 8 | 4 | 777 | 68 | 140 | 25 | 1 | 1 | 2018-08-23T17:53:08 | 1,790 | Java | {'Java': 2754203, 'FreeMarker': 46623, 'JavaScript': 7344, 'Batchfile': 653, 'Shell': 573} | Apache License 2.0 |
1,364 | synthetichealth/synthea/641/608 | synthetichealth | synthea | https://github.com/synthetichealth/synthea/issues/608 | https://github.com/synthetichealth/synthea/pull/641 | https://github.com/synthetichealth/synthea/pull/641 | 1 | fix | Providers CSV exports UUIDs with duplicates | Hello, it seems that even with a small population of 10 that the id of some providers is not unique in the csv export. Is this possibly the line of code that is the cause for that
https://github.com/synthetichealth/synthea/blob/bcb17a9532645f03f6410b7d5cf67092592842e7/src/main/java/org/mitre/synthea/world/agents/Provider.java#L476
or am I doing something wrong? | e25b2a02ad31f7c86096c12b7d6f2e4ba34364cc | 57702711e128e96e6a319952d52d462d0e353daa | https://github.com/synthetichealth/synthea/compare/e25b2a02ad31f7c86096c12b7d6f2e4ba34364cc...57702711e128e96e6a319952d52d462d0e353daa | diff --git a/src/main/java/org/mitre/synthea/world/agents/Clinician.java b/src/main/java/org/mitre/synthea/world/agents/Clinician.java
index 50902977e..352f3d84c 100644
--- a/src/main/java/org/mitre/synthea/world/agents/Clinician.java
+++ b/src/main/java/org/mitre/synthea/world/agents/Clinician.java
@@ -54,7 +54,9 @@ public class Clinician implements Serializable, QuadTreeElement {
*/
public Clinician(long clinicianSeed, Random clinicianRand,
long identifier, Provider organization) {
- this.uuid = new UUID(clinicianSeed, identifier).toString();
+ String base = clinicianSeed + ":" + identifier + ":" +
+ organization.id + ":" + clinicianRand.nextLong();
+ this.uuid = UUID.nameUUIDFromBytes(base.getBytes()).toString();
this.random = clinicianRand;
this.identifier = identifier;
this.organization = organization; | ['src/main/java/org/mitre/synthea/world/agents/Clinician.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,248,351 | 283,626 | 32,733 | 86 | 255 | 57 | 4 | 1 | 365 | 44 | 104 | 3 | 1 | 0 | 2020-01-22T21:36:40 | 1,790 | Java | {'Java': 2754203, 'FreeMarker': 46623, 'JavaScript': 7344, 'Batchfile': 653, 'Shell': 573} | Apache License 2.0 |
1,366 | synthetichealth/synthea/365/351 | synthetichealth | synthea | https://github.com/synthetichealth/synthea/issues/351 | https://github.com/synthetichealth/synthea/pull/365 | https://github.com/synthetichealth/synthea/pull/365 | 1 | fixes | Encounter.diagnosePastConditions | The `Encounter.diagnosePastConditions` method has a couple issues:
https://github.com/synthetichealth/synthea/blob/master/src/main/java/org/mitre/synthea/engine/State.java#L527
1. It is inconsistent with the description of how conditions being diagnosed is supposed to work. Per
https://github.com/synthetichealth/synthea/wiki/Generic-Module-Framework%3A-States#conditiononset a condition should only be diagnosed at the target_encounter, not any encounter
2. It loops over every state in the module's history, which can be slow as module histories grow over the years. It should break early if possible, ex if it finds another instance of the same state in the history. | 279d9dfe54eb88d8790e71dfe0b0fde0f439d059 | b811698ee341b7096987211c6f650dea9d61d9e8 | https://github.com/synthetichealth/synthea/compare/279d9dfe54eb88d8790e71dfe0b0fde0f439d059...b811698ee341b7096987211c6f650dea9d61d9e8 | diff --git a/src/main/java/org/mitre/synthea/engine/State.java b/src/main/java/org/mitre/synthea/engine/State.java
index a0ffd9fe8..33495d246 100644
--- a/src/main/java/org/mitre/synthea/engine/State.java
+++ b/src/main/java/org/mitre/synthea/engine/State.java
@@ -525,9 +525,17 @@ public abstract class State implements Cloneable {
}
private void diagnosePastConditions(Person person, long time) {
+ // reminder: history[0] is current state, history[size-1] is Initial
for (State state : person.history) {
- if (state instanceof OnsetState && !((OnsetState) state).diagnosed) {
- ((OnsetState) state).diagnose(person, time);
+ if (state instanceof OnsetState) {
+ OnsetState onset = (OnsetState) state;
+
+ if (!onset.diagnosed && this.name.equals(onset.targetEncounter)) {
+ onset.diagnose(person, time);
+ }
+ } else if (state instanceof Encounter && state.name.equals(this.name)) {
+ // a prior instance of hitting this same state. no need to go back any further
+ break;
}
}
}
diff --git a/src/test/java/org/mitre/synthea/engine/StateTest.java b/src/test/java/org/mitre/synthea/engine/StateTest.java
index a8e63d0fb..4c42dad72 100644
--- a/src/test/java/org/mitre/synthea/engine/StateTest.java
+++ b/src/test/java/org/mitre/synthea/engine/StateTest.java
@@ -151,6 +151,32 @@ public class StateTest {
verifyZeroInteractions(person.record);
}
+ @Test
+ public void condition_onset_diagnosed_by_target_encounter() {
+ Module module = getModule("condition_onset.json");
+
+ State condition = module.getState("Diabetes");
+ // Should pass through this state immediately without calling the record
+ assertTrue(condition.process(person, time));
+ person.history.add(0, condition);
+
+ // The encounter comes next (and add it to history);
+ State encounter = module.getState("ED_Visit");
+
+ assertTrue(encounter.process(person, time));
+ person.history.add(0, encounter);
+
+ assertEquals(1, person.record.encounters.size());
+ Encounter enc = person.record.encounters.get(0);
+ Code code = enc.codes.get(0);
+ assertEquals("50849002", code.code);
+ assertEquals("Emergency Room Admission", code.display);
+ assertEquals(1, enc.conditions.size());
+ code = enc.conditions.get(0).codes.get(0);
+ assertEquals("73211009", code.code);
+ assertEquals("Diabetes mellitus", code.display);
+ }
+
@Test
public void condition_onset_during_encounter() {
Module module = getModule("condition_onset.json"); | ['src/main/java/org/mitre/synthea/engine/State.java', 'src/test/java/org/mitre/synthea/engine/StateTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 615,087 | 140,903 | 16,526 | 48 | 640 | 144 | 12 | 1 | 680 | 78 | 159 | 7 | 2 | 0 | 2018-07-03T14:25:23 | 1,790 | Java | {'Java': 2754203, 'FreeMarker': 46623, 'JavaScript': 7344, 'Batchfile': 653, 'Shell': 573} | Apache License 2.0 |
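For the record above: the patch bounds the history scan by diagnosing only the onsets that name the current encounter as their target, and by stopping at the previous occurrence of the same encounter state. A minimal standalone sketch of that early-exit pattern, using a hypothetical `HistoryEntry` type rather than Synthea's real `State` classes; it assumes the history list is ordered newest-first and does not yet contain the encounter being processed.

```java
import java.util.List;

// Hypothetical stand-in for the engine's State subclasses, not a real Synthea type.
record HistoryEntry(String name, String kind, String targetEncounter, boolean diagnosed) {}

class EarlyExitScan {
    /**
     * Diagnoses onsets that target the given encounter and stops at the previous
     * occurrence of that same encounter, so older history is never re-scanned.
     */
    static void diagnoseAt(String encounterName, List<HistoryEntry> history) {
        for (HistoryEntry entry : history) {
            if ("onset".equals(entry.kind())
                    && !entry.diagnosed()
                    && encounterName.equals(entry.targetEncounter())) {
                System.out.println("diagnose " + entry.name() + " at " + encounterName);
            } else if ("encounter".equals(entry.kind()) && encounterName.equals(entry.name())) {
                break; // a prior visit to this encounter: everything older was already handled
            }
        }
    }
}
```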
480 | fabric8io/docker-maven-plugin/943/930 | fabric8io | docker-maven-plugin | https://github.com/fabric8io/docker-maven-plugin/issues/930 | https://github.com/fabric8io/docker-maven-plugin/pull/943 | https://github.com/fabric8io/docker-maven-plugin/pull/943 | 1 | fix | [BUG] Can't parse docker-credential-gcloud multiline output | ### Description
When using `docker-credential-gcloud`, the plugin fails to parse the JSON credentials returned by the helper.
```
[ERROR] Failed to execute goal io.fabric8:docker-maven-plugin:0.24-SNAPSHOT:build (default-build) on project test-docker-maven-plugin: Execution default-build of goal io.fabric8:docker-maven-plugin:0.24-SNAPSHOT:build failed: A JSONObject text must begin with '{' at 1 [character 2 line 1] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal io.fabric8:docker-maven-plugin:0.24-SNAPSHOT:build (default-build) on project airflow: Execution default-build of goal io.fabric8:docker-maven-plugin:0.24-SNAPSHOT:build failed: A JSONObject text must begin with '{' at 1 [character 2 line 1]
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
at org.apache.maven.cli.MavenCli.execute (MavenCli.java:993)
at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:345)
at org.apache.maven.cli.MavenCli.main (MavenCli.java:191)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:498)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.PluginExecutionException: Execution default-build of goal io.fabric8:docker-maven-plugin:0.24-SNAPSHOT:build failed: A JSONObject text must begin with '{' at 1 [character 2 line 1]
at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:145)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
at org.apache.maven.cli.MavenCli.execute (MavenCli.java:993)
at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:345)
at org.apache.maven.cli.MavenCli.main (MavenCli.java:191)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:498)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.json.JSONException: A JSONObject text must begin with '{' at 1 [character 2 line 1]
at org.json.JSONTokener.syntaxError (JSONTokener.java:433)
at org.json.JSONObject.<init> (JSONObject.java:194)
at io.fabric8.maven.docker.util.CredentialHelperClient$GetCommand.getCredentialNode (CredentialHelperClient.java:111)
at io.fabric8.maven.docker.util.CredentialHelperClient.getAuthConfig (CredentialHelperClient.java:38)
at io.fabric8.maven.docker.util.AuthConfigFactory.extractAuthConfigFromCredentialsHelper (AuthConfigFactory.java:351)
at io.fabric8.maven.docker.util.AuthConfigFactory.getAuthConfigFromDockerConfig (AuthConfigFactory.java:320)
at io.fabric8.maven.docker.util.AuthConfigFactory.createAuthConfig (AuthConfigFactory.java:119)
at io.fabric8.maven.docker.service.RegistryService.createAuthConfig (RegistryService.java:139)
at io.fabric8.maven.docker.service.RegistryService.pullImageWithPolicy (RegistryService.java:98)
at io.fabric8.maven.docker.service.BuildService.autoPullBaseImage (BuildService.java:209)
at io.fabric8.maven.docker.service.BuildService.buildImage (BuildService.java:58)
at io.fabric8.maven.docker.BuildMojo.buildAndTag (BuildMojo.java:59)
at io.fabric8.maven.docker.BuildMojo.executeInternal (BuildMojo.java:44)
at io.fabric8.maven.docker.AbstractDockerMojo.execute (AbstractDockerMojo.java:220)
at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
at org.apache.maven.cli.MavenCli.execute (MavenCli.java:993)
at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:345)
at org.apache.maven.cli.MavenCli.main (MavenCli.java:191)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:498)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
```
The helper works correctly when invoked standalone
```
giuseppe@zeug:~$ echo 'https://gcr.io' | docker-credential-gcloud get
{
"Secret": "[REDACTED]",
"Username": "oauth2accesstoken"
}
giuseppe@zeug:~$
```
The issue seems to arise from the ExternalCommand class, where the output of the commands is read/returned line by line.
The output of `docker-credential-gcloud` spans multiple lines, and this confuses the parser into using only the last line and failing to find valid JSON.
### Info
* d-m-p version: 0.24-SNAPSHOT (17/1/2018)
* Maven version (`mvn -v`):
```
Apache Maven 3.5.0
Maven home: /usr/share/maven
Java version: 1.8.0_151, vendor: Oracle Corporation
Java home: /usr/lib/jvm/java-8-openjdk-amd64/jre
Default locale: en_US, platform encoding: UTF-8
OS name: "linux", version: "4.13.0-21-generic", arch: "amd64", family: "unix"
```
* Docker version: Docker version 17.12.0-ce, build c97c6d6
* If it's a bug, how to reproduce: enable the `docker-credential-gcloud` and try to push/pull an image hosted in GCR | 90977d2e0e82ce0fd37f6612cd061f9f74e951ea | b75bbffebdd28af4c32840042c9814bbe7a5c306 | https://github.com/fabric8io/docker-maven-plugin/compare/90977d2e0e82ce0fd37f6612cd061f9f74e951ea...b75bbffebdd28af4c32840042c9814bbe7a5c306 | diff --git a/src/main/java/io/fabric8/maven/docker/util/CredentialHelperClient.java b/src/main/java/io/fabric8/maven/docker/util/CredentialHelperClient.java
index 6b222927..8dc66572 100644
--- a/src/main/java/io/fabric8/maven/docker/util/CredentialHelperClient.java
+++ b/src/main/java/io/fabric8/maven/docker/util/CredentialHelperClient.java
@@ -1,5 +1,7 @@
package io.fabric8.maven.docker.util;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
import io.fabric8.maven.docker.access.AuthConfig;
import io.fabric8.maven.docker.access.util.ExternalCommand;
import org.apache.maven.plugin.MojoExecutionException;
@@ -7,6 +9,7 @@ import org.json.JSONObject;
import org.json.JSONTokener;
import java.io.IOException;
+import java.util.List;
public class CredentialHelperClient {
@@ -82,7 +85,7 @@ public class CredentialHelperClient {
// echo <registryToLookup> | docker-credential-XXX get
private class GetCommand extends ExternalCommand {
- private String reply;
+ private List<String> reply = Lists.newLinkedList();
GetCommand() {
super(CredentialHelperClient.this.log);
@@ -95,7 +98,7 @@ public class CredentialHelperClient {
@Override
protected void processLine(String line) {
- reply = line;
+ reply.add(line);
}
public JSONObject getCredentialNode(String registryToLookup) throws IOException {
@@ -108,7 +111,7 @@ public class CredentialHelperClient {
throw ex;
}
}
- JSONObject credentials = new JSONObject(new JSONTokener(reply));
+ JSONObject credentials = new JSONObject(new JSONTokener(Joiner.on('\n').join(reply)));
if (!credentials.has(SECRET_KEY) || !credentials.has(USERNAME_KEY)) {
return null;
} | ['src/main/java/io/fabric8/maven/docker/util/CredentialHelperClient.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 757,785 | 150,491 | 22,220 | 157 | 430 | 79 | 9 | 1 | 9,041 | 497 | 2,199 | 116 | 1 | 3 | 2018-02-14T13:01:39 | 1,777 | Java | {'Java': 1771506, 'Shell': 9406, 'Dockerfile': 2402, 'CMake': 997, 'Groovy': 764, 'C': 408} | Apache License 2.0 |
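For context on the failure mode in the record above: `docker-credential-gcloud` pretty-prints its JSON reply across several lines, so a callback that keeps only the most recent line ends up holding a fragment such as `}` rather than the whole document. A minimal sketch — the registry URL is the one from the issue, everything else is illustrative — that drains the helper's entire stdout before parsing:

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;

public class CredentialHelperProbe {
    public static void main(String[] args) throws Exception {
        // Any docker-credential-* binary on the PATH behaves the same way.
        Process process = new ProcessBuilder("docker-credential-gcloud", "get").start();

        // The helper expects the registry URL on stdin.
        process.getOutputStream().write("https://gcr.io\n".getBytes(StandardCharsets.UTF_8));
        process.getOutputStream().close();

        // Collect every line of stdout; the JSON object spans several of them.
        String json;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
            json = reader.lines().collect(Collectors.joining("\n"));
        }
        process.waitFor();

        // 'json' now holds the complete document and can be handed to a JSON parser;
        // keeping only the last line would have left just "}".
        System.out.println(json);
    }
}
```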
481 | fabric8io/docker-maven-plugin/940/911 | fabric8io | docker-maven-plugin | https://github.com/fabric8io/docker-maven-plugin/issues/911 | https://github.com/fabric8io/docker-maven-plugin/pull/940 | https://github.com/fabric8io/docker-maven-plugin/pull/940 | 1 | fix | NPE when logging to file | ### Description
When logging to a file and starting a container early in the build, a NullPointerException is thrown:
```
Exception in thread "Thread-1" java.lang.NullPointerException
at io.fabric8.maven.docker.log.DefaultLogCallback.ps(DefaultLogCallback.java:74)
at io.fabric8.maven.docker.log.DefaultLogCallback.error(DefaultLogCallback.java:84)
at io.fabric8.maven.docker.access.log.LogRequestor.run(LogRequestor.java:110)
```
### Info
* d-m-p version: 0.23.0
* Maven version (`mvn -v`):
```
Apache Maven 3.5.2 (138edd61fd100ec658bfa2d307c43b76940a5d7d; 2017-10-18T09:58:13+02:00)
Maven home: /home/hho/.sdkman/candidates/maven/current
Java version: 1.8.0_151, vendor: Oracle Corporation
Java home: /usr/lib/jvm/java-8-oracle/jre
Default locale: en_US, platform encoding: UTF-8
OS name: "linux", version: "4.13.0-19-generic", arch: "amd64", family: "unix"
```
* Docker version: 17.11.0-ce
* If it's a bug, how to reproduce:
1. Use the POM given below
2. run `mvn compile`
<details>
<summary>POM to reproduce</summary>
```xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>demo</groupId>
<artifactId>npedemo</artifactId>
<version>1.0-SNAPSHOT</version>
<build>
<plugins>
<plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>0.23.0</version>
<configuration>
<images>
<image>
<name>postgres:alpine</name>
<run>
<log>
<file>${project.build.directory}/postgres.log</file>
</log>
</run>
</image>
</images>
</configuration>
<executions>
<execution>
<id>start-db</id>
<phase>generate-sources</phase>
<goals><goal>start</goal></goals>
</execution>
<execution>
<id>stop-db</id>
<phase>generate-resources</phase>
<goals><goal>stop</goal></goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
```
</details>
<details>
<summary>Example Build Output</summary>
```
➜ mvn compile
[INFO] Scanning for projects...
[INFO]
[INFO] ------------------------------------------------------------------------
[INFO] Building npedemo 1.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO]
[INFO] --- docker-maven-plugin:0.23.0:start (start-db) @ npedemo ---
[INFO] DOCKER> [postgres:alpine]: Start container 69f9e99da85e
Exception in thread "Thread-1" java.lang.NullPointerException
at io.fabric8.maven.docker.log.DefaultLogCallback.ps(DefaultLogCallback.java:74)
at io.fabric8.maven.docker.log.DefaultLogCallback.error(DefaultLogCallback.java:84)
at io.fabric8.maven.docker.access.log.LogRequestor.run(LogRequestor.java:110)
[INFO]
[INFO] --- docker-maven-plugin:0.23.0:stop (stop-db) @ npedemo ---
[INFO] DOCKER> [postgres:alpine]: Stop and removed container 69f9e99da85e after 0 ms
[INFO]
[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ npedemo ---
[WARNING] Using platform encoding (UTF-8 actually) to copy filtered resources, i.e. build is platform dependent!
[INFO] skip non existing resourceDirectory /tmp/npedemo/src/main/resources
[INFO]
[INFO] --- maven-compiler-plugin:3.1:compile (default-compile) @ npedemo ---
[INFO] Changes detected - recompiling the module!
[WARNING] File encoding has not been set, using platform encoding UTF-8, i.e. build is platform dependent!
[INFO] Compiling 1 source file to /tmp/npedemo/target/classes
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 12.681 s
[INFO] Finished at: 2017-12-14T17:28:47+01:00
[INFO] Final Memory: 21M/360M
[INFO] ------------------------------------------------------------------------
```
</details>
| d2a7861bb14b122428d98a491ea1c37f8401f20f | bbb0a6731bc08117381f1afc361c37e9ca9a1b3b | https://github.com/fabric8io/docker-maven-plugin/compare/d2a7861bb14b122428d98a491ea1c37f8401f20f...bbb0a6731bc08117381f1afc361c37e9ca9a1b3b | diff --git a/src/main/java/io/fabric8/maven/docker/log/DefaultLogCallback.java b/src/main/java/io/fabric8/maven/docker/log/DefaultLogCallback.java
index f46c7372..ebcef931 100644
--- a/src/main/java/io/fabric8/maven/docker/log/DefaultLogCallback.java
+++ b/src/main/java/io/fabric8/maven/docker/log/DefaultLogCallback.java
@@ -19,6 +19,7 @@ import java.io.*;
import java.util.HashMap;
import java.util.Map;
+import com.google.common.io.Files;
import io.fabric8.maven.docker.access.log.LogCallback;
import io.fabric8.maven.docker.util.Timestamp;
@@ -46,6 +47,7 @@ public class DefaultLogCallback implements LogCallback {
} else {
SharedPrintStream cachedPs = printStreamMap.get(file);
if (cachedPs == null) {
+ Files.createParentDirs(new File(file));
PrintStream ps = new PrintStream(new FileOutputStream(file), true);
cachedPs = new SharedPrintStream(ps);
printStreamMap.put(file, cachedPs);
diff --git a/src/main/java/io/fabric8/maven/docker/log/SharedPrintStream.java b/src/main/java/io/fabric8/maven/docker/log/SharedPrintStream.java
index 5b0bab48..78cf20d0 100644
--- a/src/main/java/io/fabric8/maven/docker/log/SharedPrintStream.java
+++ b/src/main/java/io/fabric8/maven/docker/log/SharedPrintStream.java
@@ -24,7 +24,7 @@ class SharedPrintStream {
boolean close() {
int nrUsers = numUsers.decrementAndGet();
if (nrUsers == 0 && printStream != System.out) {
- printStream.close();;
+ printStream.close();
return true;
} else {
return false;
diff --git a/src/test/java/io/fabric8/maven/docker/log/DefaultLogCallbackTest.java b/src/test/java/io/fabric8/maven/docker/log/DefaultLogCallbackTest.java
index 9440d914..ed8b273f 100644
--- a/src/test/java/io/fabric8/maven/docker/log/DefaultLogCallbackTest.java
+++ b/src/test/java/io/fabric8/maven/docker/log/DefaultLogCallbackTest.java
@@ -5,6 +5,7 @@ import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
import java.io.*;
import java.util.*;
@@ -12,6 +13,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
+import com.google.common.io.Files;
import org.apache.maven.shared.utils.io.FileUtils;
import org.junit.Before;
import org.junit.Test;
@@ -147,6 +149,18 @@ public class DefaultLogCallbackTest {
assertThat(indexes, is(empty()));
}
+ @Test
+ public void shouldCreateParentDirs() throws IOException {
+ File dir = Files.createTempDir();
+ dir.deleteOnExit();
+ file = new File(dir, "non/existing/dirs/file.log");
+ spec = new LogOutputSpec.Builder().prefix("callback-test> ")
+ .file(file.toString()).build();
+ callback = new DefaultLogCallback(spec);
+ callback.open();
+ assertTrue(file.exists());
+ }
+
private class LoggerTask implements Runnable {
private LogCallback cb; | ['src/main/java/io/fabric8/maven/docker/log/DefaultLogCallback.java', 'src/test/java/io/fabric8/maven/docker/log/DefaultLogCallbackTest.java', 'src/main/java/io/fabric8/maven/docker/log/SharedPrintStream.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 757,691 | 150,476 | 22,218 | 157 | 164 | 27 | 4 | 2 | 4,310 | 305 | 1,145 | 118 | 4 | 4 | 2018-02-07T21:08:32 | 1,777 | Java | {'Java': 1771506, 'Shell': 9406, 'Dockerfile': 2402, 'CMake': 997, 'Groovy': 764, 'C': 408} | Apache License 2.0 |
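On the record above: `new FileOutputStream(path)` throws `FileNotFoundException` when the parent directory is missing — and `target/` does not exist yet that early in the build — which leaves the callback half-initialised, so a later `error()` call dereferences a null stream. The merged fix relies on Guava's `Files.createParentDirs`; the sketch below shows the same guard using only the JDK, with a made-up log path.

```java
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.nio.file.Files;
import java.nio.file.Path;

public class LogFileOpen {
    public static void main(String[] args) throws Exception {
        Path logFile = Path.of("target/container-logs/postgres.log"); // hypothetical location

        // Without this, the FileOutputStream below fails with FileNotFoundException
        // whenever the directory tree has not been created yet.
        Files.createDirectories(logFile.getParent());

        try (PrintStream ps = new PrintStream(new FileOutputStream(logFile.toFile()), true)) {
            ps.println("container started");
        }
    }
}
```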
149 | naver/ngrinder/189/184 | naver | ngrinder | https://github.com/naver/ngrinder/issues/184 | https://github.com/naver/ngrinder/pull/189 | https://github.com/naver/ngrinder/pull/189 | 1 | fix | Failed to launch ngrinder 3.4 | I've been trying to use ngrinder on EC2 Ubuntu 14.04.
I tried 1) standalone deployment of the war file and 2) deployment with Tomcat 7, and both cases failed with the same errors. I also tried multiple Java/Tomcat versions and all of them failed.
Strangely, when I tried it again on another clean DigitalOcean machine and on my local OS X machine with the same environment, it worked.
I suspect anyone can easily reproduce this error, because the same thing happens even on a newly launched EC2 instance.
Please let me know if I'm missing something here.
## Following are the stack traces
org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'NGrinderDefaultExtensionFinder' defined in file [/home/ubuntu/.ngrinder/tmp/webapp/WEB-INF/classes/org/ngrinder/infra/plugin/finder/NGrinderDefaultExtensionFinder.class]: Unsatisfied dependency expressed through constructor argument with index 0 of type [ro.fortsoft.pf4j.PluginManager]: Error creating bean with name 'NGrinderDefaultPluginManager': Injection of autowired dependencies failed; nested exception is org.springframework.beans.factory.BeanCreationException: Could not autowire method: public void org.ngrinder.infra.plugin.extension.NGrinderDefaultPluginManager.setExtensionFinder(ro.fortsoft.pf4j.ExtensionFinder); nested exception is org.springframework.beans.factory.BeanCurrentlyInCreationException: Error creating bean with name 'NGrinderDefaultExtensionFinder': Requested bean is currently in creation: Is there an unresolvable circular reference?; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'NGrinderDefaultPluginManager': Injection of autowired dependencies failed; nested exception is org.springframework.beans.factory.BeanCreationException: Could not autowire method: public void org.ngrinder.infra.plugin.extension.NGrinderDefaultPluginManager.setExtensionFinder(ro.fortsoft.pf4j.ExtensionFinder); nested exception is org.springframework.beans.factory.BeanCurrentlyInCreationException: Error creating bean with name 'NGrinderDefaultExtensionFinder': Requested bean is currently in creation: Is there an unresolvable circular reference?
at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:749)
at org.springframework.beans.factory.support.ConstructorResolver.autowireConstructor(ConstructorResolver.java:185)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.autowireConstructor(AbstractAutowireCapableBeanFactory.java:1143)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1046)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:510)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:482)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:306)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:197)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:772)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:839)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:538)
at org.springframework.web.context.ContextLoader.configureAndRefreshWebApplicationContext(ContextLoader.java:444)
at org.springframework.web.context.ContextLoader.initWebApplicationContext(ContextLoader.java:326)
at org.springframework.web.context.ContextLoaderListener.contextInitialized(ContextLoaderListener.java:107)
at org.eclipse.jetty.server.handler.ContextHandler.callContextInitialized(ContextHandler.java:800)
at org.eclipse.jetty.servlet.ServletContextHandler.callContextInitialized(ServletContextHandler.java:444)
at org.eclipse.jetty.server.handler.ContextHandler.startContext(ContextHandler.java:791)
at org.eclipse.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:294)
at org.eclipse.jetty.webapp.WebAppContext.startWebapp(WebAppContext.java:1349)
at org.eclipse.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1342)
at org.eclipse.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:741)
at org.eclipse.jetty.webapp.WebAppContext.doStart(WebAppContext.java:505)
at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
at org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:132)
at org.eclipse.jetty.server.Server.start(Server.java:387)
at org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:114)
at org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:61)
at org.eclipse.jetty.server.Server.doStart(Server.java:354)
at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
at org.ngrinder.NGrinderControllerStarter.run(NGrinderControllerStarter.java:236)
at org.ngrinder.NGrinderControllerStarter.main(NGrinderControllerStarter.java:310)
Caused by:
org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'NGrinderDefaultPluginManager': Injection of autowired dependencies failed; nested exception is org.springframework.beans.factory.BeanCreationException: Could not autowire method: public void org.ngrinder.infra.plugin.extension.NGrinderDefaultPluginManager.setExtensionFinder(ro.fortsoft.pf4j.ExtensionFinder); nested exception is org.springframework.beans.factory.BeanCurrentlyInCreationException: Error creating bean with name 'NGrinderDefaultExtensionFinder': Requested bean is currently in creation: Is there an unresolvable circular reference?
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessPropertyValues(AutowiredAnnotationBeanPostProcessor.java:334)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1214)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:543)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:482)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:306)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:197)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.findAutowireCandidates(DefaultListableBeanFactory.java:1192)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1116)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1014)
at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:813)
at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:741)
at org.springframework.beans.factory.support.ConstructorResolver.autowireConstructor(ConstructorResolver.java:185)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.autowireConstructor(AbstractAutowireCapableBeanFactory.java:1143)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1046)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:510)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:482)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:306)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:197)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:772)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:197)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.findAutowireCandidates(DefaultListableBeanFactory.java:1192)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1116)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1014)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredMethodElement.inject(AutowiredAnnotationBeanPostProcessor.java:618)
at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:88)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessPropertyValues(AutowiredAnnotationBeanPostProcessor.java:331)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1214)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:543)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:482)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:306)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:197)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.findAutowireCandidates(DefaultListableBeanFactory.java:1192)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1116)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1014)
at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:813)
at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:741)
at org.springframework.beans.factory.support.ConstructorResolver.autowireConstructor(ConstructorResolver.java:185)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.autowireConstructor(AbstractAutowireCapableBeanFactory.java:1143)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1046)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:510)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:482)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:306)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:197)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:772)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:839)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:538)
at org.springframework.web.context.ContextLoader.configureAndRefreshWebApplicationContext(ContextLoader.java:444)
at org.springframework.web.context.ContextLoader.initWebApplicationContext(ContextLoader.java:326)
at org.springframework.web.context.ContextLoaderListener.contextInitialized(ContextLoaderListener.java:107)
at org.eclipse.jetty.server.handler.ContextHandler.callContextInitialized(ContextHandler.java:800)
at org.eclipse.jetty.servlet.ServletContextHandler.callContextInitialized(ServletContextHandler.java:444)
at org.eclipse.jetty.server.handler.ContextHandler.startContext(ContextHandler.java:791)
at org.eclipse.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:294)
at org.eclipse.jetty.webapp.WebAppContext.startWebapp(WebAppContext.java:1349)
at org.eclipse.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1342)
at org.eclipse.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:741)
at org.eclipse.jetty.webapp.WebAppContext.doStart(WebAppContext.java:505)
at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
at org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:132)
at org.eclipse.jetty.server.Server.start(Server.java:387)
at org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:114)
at org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:61)
at org.eclipse.jetty.server.Server.doStart(Server.java:354)
at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
at org.ngrinder.NGrinderControllerStarter.run(NGrinderControllerStarter.java:236)
at org.ngrinder.NGrinderControllerStarter.main(NGrinderControllerStarter.java:310)
| c522c91d1f477fd78dea09efb205ff3b1e35f3fa | f820dc6cfd0560a0c0d020192c1d1c5c42497e31 | https://github.com/naver/ngrinder/compare/c522c91d1f477fd78dea09efb205ff3b1e35f3fa...f820dc6cfd0560a0c0d020192c1d1c5c42497e31 | diff --git a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/PluginManager.java b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/PluginManager.java
index 09e97e450..0cce37340 100644
--- a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/PluginManager.java
+++ b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/PluginManager.java
@@ -1,4 +1,4 @@
-/*
+/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
@@ -9,21 +9,23 @@
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
- * limitations under the License.
+ * limitations under the License.
*/
package org.ngrinder.infra.plugin;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.annotation.PostConstruct;
-
import org.ngrinder.infra.config.Config;
+import org.ngrinder.infra.plugin.extension.NGrinderDefaultPluginManager;
+import org.ngrinder.infra.plugin.extension.NGrinderSpringExtensionFactory;
+import org.ngrinder.infra.plugin.finder.NGrinderDefaultExtensionFinder;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
+import ro.fortsoft.pf4j.spring.SpringExtensionFactory;
-import ro.fortsoft.pf4j.DefaultPluginManager;
+import javax.annotation.PostConstruct;
+import java.util.ArrayList;
+import java.util.List;
/**
* Plugin manager which is responsible to initialize the plugin infra.<br/>
@@ -40,9 +42,10 @@ public class PluginManager {
@Autowired
private Config config;
- @Autowired
- private DefaultPluginManager manager;
+ private NGrinderDefaultPluginManager manager;
+ @Autowired
+ private ApplicationContext applicationContext;
/**
* Initialize plugin component.
*/
@@ -58,6 +61,9 @@ public class PluginManager {
* Initialize Plugin Framework.
*/
public void initPluginFramework() {
+ manager = new NGrinderDefaultPluginManager(config, applicationContext);
+ manager.setExtensionFinder(new NGrinderDefaultExtensionFinder(manager));
+ manager.setSpringExtensionFactory(new NGrinderSpringExtensionFactory(manager, applicationContext));
manager.loadPlugins();
manager.startPlugins();
}
diff --git a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java
index f3af53965..ce2c7b73a 100644
--- a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java
+++ b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java
@@ -1,64 +1,60 @@
-package org.ngrinder.infra.plugin.extension;
-
-import java.net.MalformedURLException;
-
-import org.ngrinder.infra.config.Config;
-import org.ngrinder.infra.plugin.finder.NGrinderPluginClasspath;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.ApplicationContext;
-import org.springframework.stereotype.Component;
-
-import ro.fortsoft.pf4j.DefaultPluginManager;
-import ro.fortsoft.pf4j.DefaultPluginRepository;
-import ro.fortsoft.pf4j.DevelopmentPluginClasspath;
-import ro.fortsoft.pf4j.ExtensionFactory;
-import ro.fortsoft.pf4j.ExtensionFinder;
-import ro.fortsoft.pf4j.PluginClasspath;
-import ro.fortsoft.pf4j.RuntimeMode;
-import ro.fortsoft.pf4j.spring.SpringExtensionFactory;
-import ro.fortsoft.pf4j.util.JarFileFilter;
-
-/**
- * DefaultPluginManager extended class.
- *
- * @author Gisoo Gwon ,GeunWoo Son
- * @see https://github.com/decebals/pf4j
- * @since 3.0
- */
-@Component
-public class NGrinderDefaultPluginManager extends DefaultPluginManager {
-
- @Autowired
- public NGrinderDefaultPluginManager(Config config, ApplicationContext applicationContext) throws MalformedURLException {
- super(config.isClustered() ? config.getExHome().getPluginsCacheDirectory() : config.getHome().getPluginsCacheDirectory());
- super.pluginRepository = new DefaultPluginRepository(config.getHome().getPluginsDirectory(), new JarFileFilter());
- }
-
- @Autowired
- public void setExtensionFinder(ExtensionFinder extensionFinder) {
- super.extensionFinder = extensionFinder;
- }
-
- @Autowired
- public void setSpringExtensionFactory(SpringExtensionFactory extensionFactory) {
- super.extensionFactory = extensionFactory;
- }
-
- @Override
- protected PluginClasspath createPluginClasspath() {
- return new NGrinderPluginClasspath();
- }
-
- @Override
- protected ExtensionFactory createExtensionFactory() {
- // Disable the default Factory
- return null;
- }
-
- @Override
- protected ExtensionFinder createExtensionFinder() {
- // Disable the default finder
- return null;
- }
-
-}
+package org.ngrinder.infra.plugin.extension;
+
+import java.net.MalformedURLException;
+
+import org.ngrinder.infra.config.Config;
+import org.ngrinder.infra.plugin.finder.NGrinderPluginClasspath;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.stereotype.Component;
+
+import ro.fortsoft.pf4j.DefaultPluginManager;
+import ro.fortsoft.pf4j.DefaultPluginRepository;
+import ro.fortsoft.pf4j.DevelopmentPluginClasspath;
+import ro.fortsoft.pf4j.ExtensionFactory;
+import ro.fortsoft.pf4j.ExtensionFinder;
+import ro.fortsoft.pf4j.PluginClasspath;
+import ro.fortsoft.pf4j.RuntimeMode;
+import ro.fortsoft.pf4j.spring.SpringExtensionFactory;
+import ro.fortsoft.pf4j.util.JarFileFilter;
+
+/**
+ * DefaultPluginManager extended class.
+ *
+ * @author Gisoo Gwon ,GeunWoo Son
+ * @see https://github.com/decebals/pf4j
+ * @since 3.0
+ */
+public class NGrinderDefaultPluginManager extends DefaultPluginManager {
+
+ public void setExtensionFinder(ExtensionFinder extensionFinder) {
+ super.extensionFinder = extensionFinder;
+ }
+
+ public NGrinderDefaultPluginManager(Config config, ApplicationContext applicationContext) {
+ super(config.isClustered() ? config.getExHome().getPluginsCacheDirectory() : config.getHome().getPluginsCacheDirectory());
+ super.pluginRepository = new DefaultPluginRepository(config.getHome().getPluginsDirectory(), new JarFileFilter());
+ }
+
+ public void setSpringExtensionFactory(SpringExtensionFactory extensionFactory) {
+ super.extensionFactory = extensionFactory;
+ }
+
+ @Override
+ protected PluginClasspath createPluginClasspath() {
+ return new NGrinderPluginClasspath();
+ }
+
+ @Override
+ protected ExtensionFactory createExtensionFactory() {
+ // Disable the default Factory
+ return null;
+ }
+
+ @Override
+ protected ExtensionFinder createExtensionFinder() {
+ // Disable the default finder
+ return null;
+ }
+
+}
diff --git a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderSpringExtensionFactory.java b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderSpringExtensionFactory.java
index 8ddbfd0d7..d15c0c470 100644
--- a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderSpringExtensionFactory.java
+++ b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderSpringExtensionFactory.java
@@ -1,51 +1,45 @@
-package org.ngrinder.infra.plugin.extension;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.ApplicationContext;
-import org.springframework.stereotype.Component;
-
-import ro.fortsoft.pf4j.Plugin;
-import ro.fortsoft.pf4j.PluginManager;
-import ro.fortsoft.pf4j.PluginWrapper;
-import ro.fortsoft.pf4j.spring.SpringExtensionFactory;
-import ro.fortsoft.pf4j.spring.SpringPlugin;
-
-/**
- * SpringExtensionFactory extended class.
- * The springframework ApplicationContext injection.
- *
- * @author Gisoo Gwon ,GeunWoo Son
- * @see https://github.com/decebals/pf4j-spring
- * @since 3.0
- */
-@Component
-public class NGrinderSpringExtensionFactory extends SpringExtensionFactory {
-
- private final PluginManager pluginManager;
-
- @Autowired
- private ApplicationContext applicationContext;
-
- @Autowired
- public NGrinderSpringExtensionFactory(PluginManager pluginManager) {
- super(pluginManager);
- this.pluginManager = pluginManager;
- }
-
- protected void setApplicationContext(ApplicationContext applicationContext) {
- this.applicationContext = applicationContext;
- }
-
- @Override
- public Object create(Class<?> extensionClass) {
- Object extension = createWithoutSpring(extensionClass);
- if (extension != null) {
- PluginWrapper pluginWrapper = pluginManager.whichPlugin(extensionClass);
- if (pluginWrapper != null) {
- applicationContext.getAutowireCapableBeanFactory().autowireBean(extension);
- }
- }
- return extension;
- }
-
-}
+package org.ngrinder.infra.plugin.extension;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.stereotype.Component;
+
+import ro.fortsoft.pf4j.Plugin;
+import ro.fortsoft.pf4j.PluginManager;
+import ro.fortsoft.pf4j.PluginWrapper;
+import ro.fortsoft.pf4j.spring.SpringExtensionFactory;
+import ro.fortsoft.pf4j.spring.SpringPlugin;
+
+/**
+ * SpringExtensionFactory extended class.
+ * The springframework ApplicationContext injection.
+ *
+ * @author Gisoo Gwon ,GeunWoo Son
+ * @see https://github.com/decebals/pf4j-spring
+ * @since 3.0
+ */
+public class NGrinderSpringExtensionFactory extends SpringExtensionFactory {
+
+ private final PluginManager pluginManager;
+
+ private ApplicationContext applicationContext;
+
+ public NGrinderSpringExtensionFactory(PluginManager pluginManager, ApplicationContext applicationContext) {
+ super(pluginManager);
+ this.pluginManager = pluginManager;
+ this.applicationContext = applicationContext;
+ }
+
+ @Override
+ public Object create(Class<?> extensionClass) {
+ Object extension = createWithoutSpring(extensionClass);
+ if (extension != null) {
+ PluginWrapper pluginWrapper = pluginManager.whichPlugin(extensionClass);
+ if (pluginWrapper != null) {
+ applicationContext.getAutowireCapableBeanFactory().autowireBean(extension);
+ }
+ }
+ return extension;
+ }
+
+}
diff --git a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/finder/NGrinderDefaultExtensionFinder.java b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/finder/NGrinderDefaultExtensionFinder.java
index 0bf22021c..8e1e14d30 100644
--- a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/finder/NGrinderDefaultExtensionFinder.java
+++ b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/finder/NGrinderDefaultExtensionFinder.java
@@ -1,31 +1,40 @@
-package org.ngrinder.infra.plugin.finder;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-import ro.fortsoft.pf4j.DefaultExtensionFinder;
-import ro.fortsoft.pf4j.PluginManager;
-
-/**
- * DefaultExtensionFinder extended class.
- * Connect with Finder.
- *
- * @author Gisoo Gwon ,GeunWoo Son
- * @see https://github.com/decebals/pf4j-spring
- * @since 3.0
- */
-@Component
-public class NGrinderDefaultExtensionFinder extends DefaultExtensionFinder{
-
- @Autowired
- public NGrinderDefaultExtensionFinder(PluginManager pluginManager) {
- super(pluginManager);
- finders.add(new NGrinderServiceProviderExtensionFinder(pluginManager));
- }
-
- @Override
- protected void addDefaults(PluginManager pluginManager) {
- // Disable the default ProviderExtensionFinder
- }
-
-}
+package org.ngrinder.infra.plugin.finder;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import ro.fortsoft.pf4j.DefaultExtensionFinder;
+import ro.fortsoft.pf4j.ExtensionFinder;
+import ro.fortsoft.pf4j.ExtensionWrapper;
+import ro.fortsoft.pf4j.PluginManager;
+
+import java.util.List;
+import java.util.Set;
+
+/**
+ * DefaultExtensionFinder extended class.
+ * Connect with Finder.
+ *
+ * @author Gisoo Gwon ,GeunWoo Son
+ * @see https://github.com/decebals/pf4j-spring
+ * @since 3.0
+ */
+public class NGrinderDefaultExtensionFinder implements ExtensionFinder {
+
+ private NGrinderServiceProviderExtensionFinder finder;
+
+ public NGrinderDefaultExtensionFinder(PluginManager pluginManager) {
+ finder = new NGrinderServiceProviderExtensionFinder(pluginManager);
+ }
+
+
+ @Override
+ public <T> List<ExtensionWrapper<T>> find(Class<T> type) {
+ return finder.find(type);
+ }
+
+ @Override
+ public Set<String> findClassNames(String pluginId) {
+ return finder.findClassNames(pluginId);
+ }
+} | ['ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderSpringExtensionFactory.java', 'ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/finder/NGrinderDefaultExtensionFinder.java', 'ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java', 'ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/PluginManager.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 1,206,177 | 276,817 | 40,085 | 357 | 10,112 | 2,242 | 317 | 4 | 15,848 | 477 | 2,962 | 120 | 0 | 0 | 2016-07-01T15:12:55 | 1,758 | Java | {'Java': 2131561, 'Vue': 441893, 'JavaScript': 33073, 'FreeMarker': 9845, 'Groovy': 7693, 'Python': 4818, 'CSS': 4721, 'HTML': 2433, 'Shell': 2240, 'Dockerfile': 2017, 'Batchfile': 1004} | Apache License 2.0 |
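On the record above: the failure is a constructor-level cycle — the extension finder's constructor needs the plugin manager, while the manager bean asks the container for a finder — so neither bean can be completed first. The fix takes those objects out of the container and wires them by hand, building the manager first and attaching its collaborators through setters. A stripped-down sketch of that ordering with hypothetical classes:

```java
// Hypothetical classes mirroring the shape of the cycle, not the real nGrinder types.
class Manager {
    private Finder finder;                          // attached after construction
    void setFinder(Finder finder) { this.finder = finder; }
}

class Finder {
    private final Manager manager;                  // genuinely needed at construction time
    Finder(Manager manager) { this.manager = manager; }
}

class Bootstrap {
    static Manager wire() {
        // Manual wiring: build the manager, then the finder that depends on it,
        // then complete the manager. No container has to resolve the cycle.
        Manager manager = new Manager();
        manager.setFinder(new Finder(manager));
        return manager;
    }
}
```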
150 | naver/ngrinder/155/154 | naver | ngrinder | https://github.com/naver/ngrinder/issues/154 | https://github.com/naver/ngrinder/pull/155 | https://github.com/naver/ngrinder/pull/155 | 1 | fix | Fix plugins loading conflict in the clustered mode | When multiple instances in clustered mode boot up at the same time, each instance tries to expand the plugin jar file at the same time and in the same place. This causes a plugin loading error.
| 488fbf0d406b8afa25619edf1d58e81cf6144c38 | bb1140363a0c933d623a9517485b533236079875 | https://github.com/naver/ngrinder/compare/488fbf0d406b8afa25619edf1d58e81cf6144c38...bb1140363a0c933d623a9517485b533236079875 | diff --git a/ngrinder-controller/src/main/java/org/ngrinder/common/model/Home.java b/ngrinder-controller/src/main/java/org/ngrinder/common/model/Home.java
index a35dd0aa9..588fe4fb9 100644
--- a/ngrinder-controller/src/main/java/org/ngrinder/common/model/Home.java
+++ b/ngrinder-controller/src/main/java/org/ngrinder/common/model/Home.java
@@ -99,15 +99,6 @@ public class Home {
return directory;
}
- /**
- * Get the plugin directory.
- *
- * @return the plugin directory
- */
- public File getPluginDirectory() {
- return new File(directory, PATH_PLUGIN);
- }
-
/**
* Copy the given file from given location.
*
@@ -182,7 +173,9 @@ public class Home {
* @return plugin cache directory.
*/
public File getPluginsCacheDirectory() {
- return getSubFile(PATH_PLUGIN + "_cache");
+ File cacheDir = getSubFile(PATH_PLUGIN + "_cache");
+ cacheDir.mkdirs();
+ return cacheDir;
}
/**
diff --git a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java
index 1aeb08f0d..f3af53965 100644
--- a/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java
+++ b/ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java
@@ -30,8 +30,8 @@ public class NGrinderDefaultPluginManager extends DefaultPluginManager {
@Autowired
public NGrinderDefaultPluginManager(Config config, ApplicationContext applicationContext) throws MalformedURLException {
- super(config.getHome().getPluginsDirectory());
- super.pluginRepository = new DefaultPluginRepository(pluginsDirectory, new JarFileFilter());
+ super(config.isClustered() ? config.getExHome().getPluginsCacheDirectory() : config.getHome().getPluginsCacheDirectory());
+ super.pluginRepository = new DefaultPluginRepository(config.getHome().getPluginsDirectory(), new JarFileFilter());
}
@Autowired
| ['ngrinder-controller/src/main/java/org/ngrinder/common/model/Home.java', 'ngrinder-controller/src/main/java/org/ngrinder/infra/plugin/extension/NGrinderDefaultPluginManager.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,205,968 | 276,762 | 40,089 | 357 | 705 | 156 | 17 | 2 | 194 | 35 | 39 | 2 | 0 | 0 | 2016-05-13T06:39:17 | 1,758 | Java | {'Java': 2131561, 'Vue': 441893, 'JavaScript': 33073, 'FreeMarker': 9845, 'Groovy': 7693, 'Python': 4818, 'CSS': 4721, 'HTML': 2433, 'Shell': 2240, 'Dockerfile': 2017, 'Batchfile': 1004} | Apache License 2.0 |
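On the record above: every node was expanding the shared `plugins` directory into the same cache location at start-up, so simultaneous boots collided. The merged change keeps the shared jar directory but puts the expansion cache on per-node storage when clustering is enabled, creating it if absent. A small sketch of that selection; the directory names are illustrative.

```java
import java.io.File;

class PluginDirs {
    /**
     * The shared home holds the plugin jars every node reads; the expansion cache
     * must be node-local in a cluster, otherwise concurrent start-ups unpack into
     * the same files.
     */
    static File pluginsCacheDir(boolean clustered, File sharedHome, File nodeLocalHome) {
        File base = clustered ? nodeLocalHome : sharedHome;
        File cache = new File(base, "plugins_cache");
        cache.mkdirs(); // the directory may not exist yet on a fresh node
        return cache;
    }
}
```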
3,546 | hapifhir/hapi-fhir/2953/2952 | hapifhir | hapi-fhir | https://github.com/hapifhir/hapi-fhir/issues/2952 | https://github.com/hapifhir/hapi-fhir/pull/2953 | https://github.com/hapifhir/hapi-fhir/pull/2953 | 1 | closes | TestUtil should check Embeddable not just Entity | TestUtil.scanEntities() only considers Entity classes, need to add also Embeddable classes | fed755523fb0663810235d19608553f5d60c21b8 | b29a61df4096bd49a1eb9432e742970e44a6bfd9 | https://github.com/hapifhir/hapi-fhir/compare/fed755523fb0663810235d19608553f5d60c21b8...b29a61df4096bd49a1eb9432e742970e44a6bfd9 | diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java
index f878494e559..3b1ed6553fd 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java
@@ -35,6 +35,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.InstantType;
import javax.persistence.Column;
+import javax.persistence.Embeddable;
import javax.persistence.Embedded;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
@@ -106,7 +107,8 @@ public class TestUtil {
for (ClassInfo classInfo : classes) {
Class<?> clazz = Class.forName(classInfo.getName());
Entity entity = clazz.getAnnotation(Entity.class);
- if (entity == null) {
+ Embeddable embeddable = clazz.getAnnotation(Embeddable.class);
+ if (entity == null && embeddable == null) {
continue;
}
| ['hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 10,514,961 | 2,463,798 | 295,809 | 2,090 | 178 | 47 | 4 | 1 | 91 | 11 | 19 | 1 | 0 | 0 | 2021-09-03T14:42:36 | 1,733 | Java | {'Java': 30495802, 'HTML': 268913, 'Ruby': 230677, 'Shell': 46167, 'JavaScript': 32124, 'GAP': 25037, 'CSS': 11872, 'Kotlin': 3951, 'Batchfile': 3861} | Apache License 2.0 |
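For why the record above matters: `@Embeddable` types can declare `@Column` mappings of their own, so a scanner that visits only `@Entity` classes silently skips those columns. A toy pair of classes (the names are made up) showing where an embeddable's columns live:

```java
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Embedded;
import javax.persistence.Entity;
import javax.persistence.Id;

@Embeddable
class AuditStamp {
    // Declared here, not on any @Entity, so an Entity-only scan never sees it.
    @Column(name = "CREATED_BY")
    private String createdBy;
}

@Entity
class ExampleResource {
    @Id
    private Long id;

    @Embedded
    private AuditStamp audit; // pulls AuditStamp's columns into this table
}
```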
3,547 | hapifhir/hapi-fhir/2776/2775 | hapifhir | hapi-fhir | https://github.com/hapifhir/hapi-fhir/issues/2775 | https://github.com/hapifhir/hapi-fhir/pull/2776 | https://github.com/hapifhir/hapi-fhir/pull/2776 | 1 | closes | Address interceptor retains previous values | **Describe the bug**
Address interceptor retains previous values, but it should not | 4bb495e9d717c64e611ab0f93483c44b8445d33e | 7d4c39b9530784b69929c7c042d26de8515cda80 | https://github.com/hapifhir/hapi-fhir/compare/4bb495e9d717c64e611ab0f93483c44b8445d33e...7d4c39b9530784b69929c7c042d26de8515cda80 | diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java
index f00bfb292d3..d76b3f83656 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java
@@ -165,6 +165,8 @@ public class AddressValidatingInterceptor {
}
protected boolean validateAddress(IBase theAddress, FhirContext theFhirContext) {
+ ExtensionUtil.clearExtensionsByUrl(theAddress, getExtensionUrl());
+
try {
AddressValidationResult validationResult = getAddressValidator().isValid(theAddress, theFhirContext);
ourLog.debug("Validated address {}", validationResult);
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptorTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptorTest.java
index da7e98fbdcd..8a79791d8a9 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptorTest.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptorTest.java
@@ -33,6 +33,7 @@ import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@@ -235,6 +236,33 @@ class AddressValidatingInterceptorTest {
assertValidationErrorValue(person.getAddress().get(1), "true");
}
+ @Test
+ void validateOnValidInvalid() {
+ Address address = new Address();
+ address.addLine("Line");
+ address.setCity("City");
+
+ Person person = new Person();
+ person.addAddress(address);
+
+ AddressValidationResult validationResult = new AddressValidationResult();
+ validationResult.setValid(true);
+ when(myValidator.isValid(eq(address), any())).thenReturn(validationResult);
+ myInterceptor.resourcePreUpdate(myRequestDetails, null, person);
+
+ assertValidationErrorValue(person.getAddress().get(0), "false");
+
+ when(myValidator.isValid(eq(address), any())).thenThrow(new RuntimeException());
+
+ myInterceptor.resourcePreUpdate(myRequestDetails, null, person);
+
+ Extension ext = assertValidationErrorExtension(address);
+ assertNotNull(ext);
+ assertNull(ext.getValue());
+ assertTrue(ext.hasExtension());
+
+ }
+
public static class TestAddressValidator implements IAddressValidator {
@Override
public AddressValidationResult isValid(IBase theAddress, FhirContext theFhirContext) throws AddressValidationException { | ['hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java', 'hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptorTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 10,482,871 | 2,456,391 | 295,446 | 2,100 | 71 | 14 | 2 | 1 | 86 | 12 | 16 | 3 | 0 | 0 | 2021-07-02T22:19:02 | 1,733 | Java | {'Java': 30495802, 'HTML': 268913, 'Ruby': 230677, 'Shell': 46167, 'JavaScript': 32124, 'GAP': 25037, 'CSS': 11872, 'Kotlin': 3951, 'Batchfile': 3861} | Apache License 2.0 |
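The one-line fix recorded in the diff above clears any validation extension left by an earlier pass before the address is validated again, so a failing validator can no longer leave a stale result behind. A minimal stand-alone sketch of that pattern (not the HAPI FHIR implementation; the marker map and the `validator` predicate are stand-ins for the real extension handling):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;

// Minimal sketch of the "clear, then re-validate" pattern; the marker map stands in
// for the validation extension the real interceptor attaches to the Address resource.
class RevalidationSketch {

    private final Map<Object, Boolean> validationMarker = new HashMap<>();

    boolean validate(final Object address, final Predicate<Object> validator) {
        // Drop whatever a previous run attached, so a failure below cannot
        // leave an outdated result on the address.
        validationMarker.remove(address);
        try {
            final boolean valid = validator.test(address); // may throw, e.g. remote validator down
            validationMarker.put(address, valid);
            return valid;
        } catch (final RuntimeException e) {
            return false; // no stale marker survives the failed validation
        }
    }
}
```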
917 | gchq/gaffer/1115/1112 | gchq | gaffer | https://github.com/gchq/Gaffer/issues/1112 | https://github.com/gchq/Gaffer/pull/1115 | https://github.com/gchq/Gaffer/pull/1115 | 1 | fixed | Parquet store fails to addElements when using HDFS | The final task for the addElements operations is to move the data from the tempDir/sorted to the dataDir/timestamp, which works fine when working on the local file system. However if you are working on HDFS then the files will be moved to dataDir/timestamp/sorted. | 337d00a3f0346bd3452c44cc9e380c653f4498c2 | 793260b9fccada40daad043b8c5e7a6d9e65a7e7 | https://github.com/gchq/gaffer/compare/337d00a3f0346bd3452c44cc9e380c653f4498c2...793260b9fccada40daad043b8c5e7a6d9e65a7e7 | diff --git a/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/AddElementsHandler.java b/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/AddElementsHandler.java
index 2e69c31142..bc8cf94b84 100755
--- a/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/AddElementsHandler.java
+++ b/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/AddElementsHandler.java
@@ -111,7 +111,7 @@ public class AddElementsHandler implements OperationHandler<AddElements> {
final long snapshot = System.currentTimeMillis();
final String destPath = dataDirString + "/" + snapshot;
LOGGER.debug("Creating directory {}", destPath);
- fs.mkdirs(new Path(destPath));
+ fs.mkdirs(new Path(destPath).getParent());
final String tempPath = tempDataDirString + "/" + ParquetStoreConstants.SORTED;
LOGGER.debug("Renaming {} to {}", tempPath, destPath);
fs.rename(new Path(tempPath), new Path(destPath));
diff --git a/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/ImportRDDOfElementsHandler.java b/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/ImportRDDOfElementsHandler.java
index 25dc58c4c7..0d37fd93ca 100644
--- a/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/ImportRDDOfElementsHandler.java
+++ b/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/ImportRDDOfElementsHandler.java
@@ -105,7 +105,7 @@ public class ImportRDDOfElementsHandler implements OperationHandler<ImportRDDOfE
// Move data from temp to data
final long snapshot = System.currentTimeMillis();
final String destPath = rootDataDirString + "/" + snapshot;
- fs.mkdirs(new Path(destPath));
+ fs.mkdirs(new Path(destPath).getParent());
fs.rename(new Path(tempDataDirString + "/" + ParquetStoreConstants.SORTED), new Path(destPath));
// Reload indices
newGraphIndex.setSnapshotTimestamp(snapshot); | ['store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/AddElementsHandler.java', 'store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/addelements/handler/ImportRDDOfElementsHandler.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,012,870 | 615,519 | 77,013 | 777 | 182 | 36 | 4 | 2 | 264 | 43 | 59 | 1 | 0 | 0 | 2017-07-27T12:32:37 | 1,711 | Java | {'Java': 9333788, 'JavaScript': 2752310, 'Shell': 15069, 'HTML': 3332, 'CSS': 505} | Apache License 2.0 |
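The Parquet store change above works around a difference in rename semantics: on HDFS, renaming onto a directory that already exists moves the source *inside* it, which is how the sorted data ended up at dataDir/timestamp/sorted. A rough sketch of the fixed move using the Hadoop FileSystem API (the paths and the literal "sorted" are illustrative; the real code uses ParquetStoreConstants.SORTED):

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch of the fixed move. Only the parent of the destination is created up front,
// so fs.rename is left to create dataDir/<timestamp> itself instead of dropping the
// source directory inside an already existing destination.
public final class MoveSortedDataSketch {

    static void moveSorted(final FileSystem fs, final String tempDataDir, final String dataDir) throws IOException {
        final long snapshot = System.currentTimeMillis();
        final Path dest = new Path(dataDir + "/" + snapshot);
        fs.mkdirs(dest.getParent());                        // create dataDir, not dataDir/<timestamp>
        fs.rename(new Path(tempDataDir + "/sorted"), dest); // sorted data now lands at dataDir/<timestamp>
    }

    public static void main(final String[] args) throws IOException {
        final FileSystem fs = FileSystem.get(new Configuration());
        moveSorted(fs, "/tmp/gaffer/tempData", "/data/gaffer");
    }
}
```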
912 | gchq/gaffer/2902/2886 | gchq | gaffer | https://github.com/gchq/Gaffer/issues/2886 | https://github.com/gchq/Gaffer/pull/2902 | https://github.com/gchq/Gaffer/pull/2902 | 1 | resolve | FederatedStore Double Cache Collision Bug | **Describe the bug**
Having a FederatedStore REST service with no gaffer.cache.service.name.suffix property specified, and then an AddGraph for a second FederatedStore with no suffix, results in a cache collision: both federated graphs end up backed by the same cache, so they are effectively the same graph. (A suffix could equally have been provided and then duplicated.)
In a Graph-as-a-Service situation, multiple users may add additional federated graphs without a suffix and all collide on the same cache. This could expose inner graphs that have weak or no security of their own, added in the belief that they were protected by the enclosing sub-federated graph. (Again, a suffix could have been provided and then duplicated.)
```
Rest FederatedStore{
FedStoreA{
owningUser=userA
graphId = "FedFoo"
suffix = "foo"
cache = "FederatedCachefoo"
graphs{
graphAWithNoSecurityMapStore{}
}
}
FedStoreB{
owningUser=userB
graphId = "CheatingGraph"
suffix = "foo"
cache = "FederatedCachefoo"
graphs{
graphAWithNoSecurityMapStore{} <---- exposed/stolen/duplicated
}
}
}
```
**To Reproduce**
Steps to reproduce the behavior:
1. Create a FederatedStore REST service with no gaffer.cache.service.name.suffix specified in its properties.
2. AddGraph a second FederatedStore, again with no gaffer.cache.service.name.suffix specified in its properties.
3. Run a FederatedOperation{GetAllGraphIds} which goes to the subgraph.
4. The subgraph returns its own graphId, because the two FederatedStores are the same due to a cache name collision.
**Additional context**
This may be expected behaviour, because the stores have been misconfigured to use the same cache.
However, the possible exploit of exposing the contents of another FederatedStore in the same REST service seems incorrect.
No infinite loops should occur.
**Possible Workarounds**
- Optional AddGraph hooks to mitigate against graphs being added without a suffix.
- The FederatedStore itself mitigating against cache name collisions in the same JVM, in a way that does not affect load balancing of FederatedStores.
**Impact**
No one; this is an Alpha 4 issue. | 5fcab094445a5e406aae56d14aae9b4ca69826bf | 8f63db7301e8de281759c30a7631be500c8a0b1a | https://github.com/gchq/gaffer/compare/5fcab094445a5e406aae56d14aae9b4ca69826bf...8f63db7301e8de281759c30a7631be500c8a0b1a | diff --git a/core/operation/src/main/java/uk/gov/gchq/gaffer/jobtracker/JobTracker.java b/core/operation/src/main/java/uk/gov/gchq/gaffer/jobtracker/JobTracker.java
index eefeb4f623..17cbcaa42c 100644
--- a/core/operation/src/main/java/uk/gov/gchq/gaffer/jobtracker/JobTracker.java
+++ b/core/operation/src/main/java/uk/gov/gchq/gaffer/jobtracker/JobTracker.java
@@ -34,7 +34,7 @@ import static java.util.Objects.nonNull;
*/
public class JobTracker extends Cache<String, JobDetail> {
- public static final String CACHE_SERVICE_NAME_PREFIX = "JobTracker";
+ private static final String CACHE_SERVICE_NAME_PREFIX = "JobTracker";
public JobTracker() {
this(null);
diff --git a/core/store/src/main/java/uk/gov/gchq/gaffer/store/StoreProperties.java b/core/store/src/main/java/uk/gov/gchq/gaffer/store/StoreProperties.java
index 5b3f5234d1..90c4f77c7b 100644
--- a/core/store/src/main/java/uk/gov/gchq/gaffer/store/StoreProperties.java
+++ b/core/store/src/main/java/uk/gov/gchq/gaffer/store/StoreProperties.java
@@ -453,7 +453,11 @@ public class StoreProperties implements Cloneable {
}
public String getCacheServiceNameSuffix() {
- return get(CACHE_SERVICE_NAME_SUFFIX, null);
+ return getCacheServiceNameSuffix(null);
+ }
+
+ public String getCacheServiceNameSuffix(final String defaultValue) {
+ return get(CACHE_SERVICE_NAME_SUFFIX, defaultValue);
}
public Properties getProperties() {
diff --git a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java
index 943394b756..d5b1ef05fe 100644
--- a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java
+++ b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java
@@ -178,7 +178,7 @@ public class FederatedStore extends Store {
*/
@Override
public void initialise(final String graphId, final Schema unused, final StoreProperties properties) throws StoreException {
- graphStorage = new FederatedGraphStorage(properties.getCacheServiceNameSuffix());
+ graphStorage = new FederatedGraphStorage(properties.getCacheServiceNameSuffix(graphId));
super.initialise(graphId, new Schema(), properties);
customPropertiesAuths = getCustomPropertiesAuths();
isPublicAccessAllowed = Boolean.valueOf(getProperties().getIsPublicAccessAllowed());
diff --git a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreCacheTransient.java b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreCacheTransient.java
index 5966ced9f0..e2c4a6b652 100644
--- a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreCacheTransient.java
+++ b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreCacheTransient.java
@@ -22,6 +22,7 @@ import uk.gov.gchq.gaffer.commonutil.pair.Pair;
import uk.gov.gchq.gaffer.graph.Graph;
import uk.gov.gchq.gaffer.graph.GraphSerialisable;
+import java.util.Locale;
import java.util.Set;
import static java.util.Objects.isNull;
@@ -40,10 +41,14 @@ public class FederatedStoreCacheTransient extends Cache<String, Pair<GraphSerial
}
public FederatedStoreCacheTransient(final String cacheNameSuffix) {
- super(String.format("%s%s", CACHE_SERVICE_NAME_PREFIX,
+ super(getCacheNameFrom(cacheNameSuffix));
+ }
+
+ public static String getCacheNameFrom(final String cacheNameSuffix) {
+ return String.format("%s%s", CACHE_SERVICE_NAME_PREFIX,
nonNull(cacheNameSuffix)
- ? "_" + cacheNameSuffix.toLowerCase()
- : ""));
+ ? "_" + cacheNameSuffix.toLowerCase(Locale.getDefault())
+ : "");
}
/**
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreMultiCacheTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreMultiCacheTest.java
index 84aaaf4222..74f2710d37 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreMultiCacheTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreMultiCacheTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2022 Crown Copyright
+ * Copyright 2017-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -44,13 +44,14 @@ import static uk.gov.gchq.gaffer.user.StoreUser.testUser;
public class FederatedStoreMultiCacheTest {
- public static final User AUTH_USER = authUser();
- public static final User TEST_USER = testUser();
- public static final User BLANK_USER = blankUser();
+ private static final User AUTH_USER = authUser();
+ private static final User TEST_USER = testUser();
+ private static final User BLANK_USER = blankUser();
private static final AccumuloProperties ACCUMULO_PROPERTIES = loadAccumuloStoreProperties(ACCUMULO_STORE_SINGLE_USE_PROPERTIES);
- public FederatedStore federatedStore;
- public FederatedStore federatedStore2WithSameCache;
- public FederatedStoreProperties federatedStoreProperties;
+ private static final String USER_SAME_CACHE_SUFFIX = "UseSameCacheSuffix";
+ private FederatedStore federatedStore;
+ private FederatedStore federatedStore2WithSameCache;
+ private FederatedStoreProperties federatedStoreProperties;
@BeforeEach
public void setUp() throws Exception {
@@ -59,6 +60,7 @@ public class FederatedStoreMultiCacheTest {
federatedStoreProperties = new FederatedStoreProperties();
federatedStoreProperties.setCacheServiceClass(CACHE_SERVICE_CLASS_STRING);
federatedStoreProperties.set(HashMapCacheService.STATIC_CACHE, String.valueOf(true));
+ federatedStoreProperties.setCacheServiceNameSuffix(USER_SAME_CACHE_SUFFIX);
federatedStore = new FederatedStore();
federatedStore.initialise(GRAPH_ID_TEST_FEDERATED_STORE, null, federatedStoreProperties);
federatedStore.execute(new AddGraph.Builder()
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java
index 5873d7d286..98bef2abd6 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java
@@ -31,7 +31,6 @@ import uk.gov.gchq.gaffer.accumulostore.AccumuloStore;
import uk.gov.gchq.gaffer.accumulostore.SingleUseAccumuloStore;
import uk.gov.gchq.gaffer.cache.CacheServiceLoader;
import uk.gov.gchq.gaffer.cache.impl.HashMapCacheService;
-import uk.gov.gchq.gaffer.commonutil.CommonConstants;
import uk.gov.gchq.gaffer.commonutil.JsonAssert;
import uk.gov.gchq.gaffer.commonutil.StreamUtil;
import uk.gov.gchq.gaffer.data.element.Edge;
@@ -64,6 +63,7 @@ import uk.gov.gchq.gaffer.store.schema.Schema;
import uk.gov.gchq.gaffer.store.schema.Schema.Builder;
import uk.gov.gchq.gaffer.user.User;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -80,6 +80,7 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.junit.jupiter.api.Assertions.assertThrows;
+import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreCacheTransient.getCacheNameFrom;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.ACCUMULO_STORE_SINGLE_USE_PROPERTIES;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.ACCUMULO_STORE_SINGLE_USE_PROPERTIES_ALT;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.CACHE_SERVICE_CLASS_STRING;
@@ -113,22 +114,22 @@ import static uk.gov.gchq.gaffer.user.StoreUser.blankUser;
import static uk.gov.gchq.gaffer.user.StoreUser.testUser;
public class FederatedStoreTest {
- public static final String ID_SCHEMA_ENTITY = "basicEntitySchema";
- public static final String ID_SCHEMA_EDGE = "basicEdgeSchema";
- public static final String ID_PROPS_ACC_1 = "miniAccProps1";
- public static final String ID_PROPS_ACC_2 = "miniAccProps2";
- public static final String ID_PROPS_ACC_ALT = "miniAccProps3";
- public static final String INVALID = "invalid";
- public static final String UNUSUAL_KEY = "unusualKey";
- public static final String KEY_DOES_NOT_BELONG = UNUSUAL_KEY + " was added to " + ID_PROPS_ACC_2 + " it should not be there";
- public static final String PATH_INCOMPLETE_SCHEMA = "/schema/edgeX2NoTypesSchema.json";
- public static final String PATH_INCOMPLETE_SCHEMA_PART_2 = "/schema/edgeTypeSchema.json";
+ private static final String ID_SCHEMA_ENTITY = "basicEntitySchema";
+ private static final String ID_SCHEMA_EDGE = "basicEdgeSchema";
+ private static final String ID_PROPS_ACC_1 = "miniAccProps1";
+ private static final String ID_PROPS_ACC_2 = "miniAccProps2";
+ private static final String ID_PROPS_ACC_ALT = "miniAccProps3";
+ private static final String INVALID = "invalid";
+ private static final String UNUSUAL_KEY = "unusualKey";
+ private static final String KEY_DOES_NOT_BELONG = UNUSUAL_KEY + " was added to " + ID_PROPS_ACC_2 + " it should not be there";
+ private static final String PATH_INCOMPLETE_SCHEMA = "/schema/edgeX2NoTypesSchema.json";
+ private static final String PATH_INCOMPLETE_SCHEMA_PART_2 = "/schema/edgeTypeSchema.json";
private static final String ACC_ID_1 = "miniAccGraphId1";
private static final String ACC_ID_2 = "miniAccGraphId2";
private static final String MAP_ID_1 = "miniMapGraphId1";
private static final String FED_ID_1 = "subFedGraphId1";
private static final String INVALID_CACHE_SERVICE_CLASS_STRING = "uk.gov.gchq.invalid";
- private static final String CACHE_SERVICE_NAME = "federatedStoreGraphs";
+ private static final String CACHE_SERVICE_NAME = getCacheNameFrom(GRAPH_ID_TEST_FEDERATED_STORE);
private static AccumuloProperties properties1;
private static AccumuloProperties properties2;
private static AccumuloProperties propertiesAlt;
@@ -177,12 +178,12 @@ public class FederatedStoreTest {
assertThat(properties2).withFailMessage("Library has changed: " + ID_PROPS_ACC_2).isEqualTo(library.getProperties(ID_PROPS_ACC_2));
assertThat(propertiesAlt).withFailMessage("Library has changed: " + ID_PROPS_ACC_ALT).isEqualTo(library.getProperties(ID_PROPS_ACC_ALT));
- assertThat(new String(getSchemaFromPath(SCHEMA_EDGE_BASIC_JSON).toJson(false), CommonConstants.UTF_8))
+ assertThat(new String(getSchemaFromPath(SCHEMA_EDGE_BASIC_JSON).toJson(false), StandardCharsets.UTF_8))
.withFailMessage("Library has changed: " + ID_SCHEMA_EDGE)
- .isEqualTo(new String(library.getSchema(ID_SCHEMA_EDGE).toJson(false), CommonConstants.UTF_8));
- assertThat(new String(getSchemaFromPath(SCHEMA_ENTITY_BASIC_JSON).toJson(false), CommonConstants.UTF_8))
+ .isEqualTo(new String(library.getSchema(ID_SCHEMA_EDGE).toJson(false), StandardCharsets.UTF_8));
+ assertThat(new String(getSchemaFromPath(SCHEMA_ENTITY_BASIC_JSON).toJson(false), StandardCharsets.UTF_8))
.withFailMessage("Library has changed: " + ID_SCHEMA_ENTITY)
- .isEqualTo(new String(library.getSchema(ID_SCHEMA_ENTITY).toJson(false), CommonConstants.UTF_8));
+ .isEqualTo(new String(library.getSchema(ID_SCHEMA_ENTITY).toJson(false), StandardCharsets.UTF_8));
}
@Test
@@ -227,7 +228,7 @@ public class FederatedStoreTest {
@Test
public void shouldThrowErrorForMissingProperty() {
// When / Then
- final List<String> schemas = asList(ID_SCHEMA_EDGE);
+ final List<String> schemas = singletonList(ID_SCHEMA_EDGE);
final Exception actual = assertThrows(Exception.class,
() -> store.execute(new AddGraph.Builder()
.graphId(ACC_ID_2)
@@ -590,7 +591,7 @@ public class FederatedStoreTest {
.graphId(ACC_ID_2)
.storeProperties(propertiesAlt)
.isPublic(true)
- .parentSchemaIds(asList(ID_SCHEMA_ENTITY))
+ .parentSchemaIds(singletonList(ID_SCHEMA_ENTITY))
.build(), blankUserContext);
// Then
@@ -638,7 +639,7 @@ public class FederatedStoreTest {
@Test
public void shouldAddGraphWithSchemaFromGraphLibraryOverridden() throws Exception {
- final List<String> schemas = asList(ID_SCHEMA_ENTITY);
+ final List<String> schemas = singletonList(ID_SCHEMA_ENTITY);
store.execute(new AddGraph.Builder()
.graphId(ACC_ID_2)
.isPublic(true)
@@ -669,7 +670,7 @@ public class FederatedStoreTest {
.storeProperties(propertiesAlt)
.parentPropertiesId(ID_PROPS_ACC_2)
.schema(tempSchema.build())
- .parentSchemaIds(asList(ID_SCHEMA_ENTITY))
+ .parentSchemaIds(singletonList(ID_SCHEMA_ENTITY))
.build(), blankUserContext);
// Then
@@ -698,7 +699,7 @@ public class FederatedStoreTest {
actual = assertThrows(Exception.class,
() -> store.execute(new AddGraph.Builder()
.graphId(ACC_ID_2)
- .parentSchemaIds(asList(ID_SCHEMA_EDGE))
+ .parentSchemaIds(singletonList(ID_SCHEMA_EDGE))
.isPublic(true)
.build(), blankUserContext));
@@ -889,7 +890,7 @@ public class FederatedStoreTest {
.graphId(ACC_ID_2)
.storeProperties(propertiesAlt)
.isPublic(true)
- .parentSchemaIds(asList(ID_SCHEMA_ENTITY))
+ .parentSchemaIds(singletonList(ID_SCHEMA_ENTITY))
.build(), blankUserContext))
.withStackTraceContaining(error);
Mockito.verify(mockLibrary).getSchema(ID_SCHEMA_ENTITY);
@@ -1242,8 +1243,8 @@ public class FederatedStoreTest {
final Entity A = getEntityA();
final Entity B = getEntityB();
- final List<Entity> expectedA = asList(A);
- final List<Entity> expectedB = asList(B);
+ final List<Entity> expectedA = singletonList(A);
+ final List<Entity> expectedB = singletonList(B);
addElementsToNewGraph(A, "graphA", SCHEMA_ENTITY_A_JSON);
addElementsToNewGraph(B, "graphB", SCHEMA_ENTITY_B_JSON);
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTestUtil.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTestUtil.java
index 7ec78d6958..979bc472eb 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTestUtil.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTestUtil.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2022 Crown Copyright
+ * Copyright 2022-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ import org.assertj.core.api.ListAssert;
import uk.gov.gchq.gaffer.accumulostore.AccumuloProperties;
import uk.gov.gchq.gaffer.cache.CacheServiceLoader;
+import uk.gov.gchq.gaffer.cache.impl.HashMapCacheService;
import uk.gov.gchq.gaffer.commonutil.ExecutorService;
import uk.gov.gchq.gaffer.commonutil.StreamUtil;
import uk.gov.gchq.gaffer.data.element.Edge;
@@ -93,7 +94,7 @@ public final class FederatedStoreTestUtil {
public static final String VALUE_1 = value(1);
public static final String VALUE_2 = value(2);
public static final String INTEGER = "integer";
- public static final String CACHE_SERVICE_CLASS_STRING = "uk.gov.gchq.gaffer.cache.impl.HashMapCacheService";
+ public static final String CACHE_SERVICE_CLASS_STRING = HashMapCacheService.class.getCanonicalName();
public static final Set<String> GRAPH_AUTHS_ALL_USERS = ImmutableSet.of(ALL_USERS);
private FederatedStoreTestUtil() {
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/integration/FederatedAdminIT.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/integration/FederatedAdminIT.java
index a69d58d3c1..10d7a1ed9d 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/integration/FederatedAdminIT.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/integration/FederatedAdminIT.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2020-2022 Crown Copyright
+ * Copyright 2020-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -97,7 +97,7 @@ public class FederatedAdminIT extends AbstractStandaloneFederatedStoreIT {
@Test
public void shouldRemoveGraphFromCache() throws Exception {
//given
- FederatedStoreCache federatedStoreCache = new FederatedStoreCache();
+ FederatedStoreCache federatedStoreCache = new FederatedStoreCache(graph.getGraphId());
graph.execute(new AddGraph.Builder()
.graphId(GRAPH_ID_A)
.schema(new Schema())
@@ -532,7 +532,7 @@ public class FederatedAdminIT extends AbstractStandaloneFederatedStoreIT {
@Test
public void shouldStartWithEmptyCache() throws Exception {
//given
- FederatedStoreCache federatedStoreCache = new FederatedStoreCache();
+ FederatedStoreCache federatedStoreCache = new FederatedStoreCache(graph.getGraphId());
//then
assertThat(federatedStoreCache.getAllGraphIds()).isEmpty();
@@ -567,7 +567,7 @@ public class FederatedAdminIT extends AbstractStandaloneFederatedStoreIT {
public void shouldChangeGraphIdInCache() throws Exception {
//given
String newName = "newName" + 23452335;
- FederatedStoreCache federatedStoreCache = new FederatedStoreCache();
+ FederatedStoreCache federatedStoreCache = new FederatedStoreCache(graph.getGraphId());
graph.execute(new AddGraph.Builder()
.graphId(GRAPH_ID_A)
@@ -621,7 +621,7 @@ public class FederatedAdminIT extends AbstractStandaloneFederatedStoreIT {
@Test
public void shouldChangeGraphAccessIdInCache() throws Exception {
//given
- FederatedStoreCache federatedStoreCache = new FederatedStoreCache();
+ FederatedStoreCache federatedStoreCache = new FederatedStoreCache(graph.getGraphId());
graph.execute(new AddGraph.Builder()
.graphId(GRAPH_ID_A)
.schema(new Schema()) | ['core/operation/src/main/java/uk/gov/gchq/gaffer/jobtracker/JobTracker.java', 'store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreCacheTransient.java', 'store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/integration/FederatedAdminIT.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreMultiCacheTest.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTestUtil.java', 'core/store/src/main/java/uk/gov/gchq/gaffer/store/StoreProperties.java'] | {'.java': 8} | 8 | 8 | 0 | 0 | 8 | 4,038,354 | 833,218 | 107,685 | 1,103 | 1,082 | 193 | 21 | 4 | 2,049 | 262 | 445 | 52 | 0 | 1 | 2023-02-23T11:00:02 | 1,711 | Java | {'Java': 9333788, 'JavaScript': 2752310, 'Shell': 15069, 'HTML': 3332, 'CSS': 505} | Apache License 2.0 |
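The FederatedStore change above closes the collision by defaulting the cache-name suffix to the store's own graphId when gaffer.cache.service.name.suffix is not set. A small self-contained sketch of that naming rule (the prefix constant is a placeholder, not Gaffer's real value):

```java
import java.util.Locale;

// Sketch of the cache-name rule after the fix: with no configured suffix the
// FederatedStore falls back to its own graphId, so two stores in one JVM get
// distinct cache names instead of silently sharing one.
public final class CacheNameSketch {

    private static final String CACHE_SERVICE_NAME_PREFIX = "FederatedStoreGraphs"; // placeholder value

    static String cacheNameFor(final String configuredSuffix, final String graphId) {
        final String suffix = configuredSuffix != null ? configuredSuffix : graphId; // fixed fallback
        return suffix == null
                ? CACHE_SERVICE_NAME_PREFIX
                : CACHE_SERVICE_NAME_PREFIX + "_" + suffix.toLowerCase(Locale.getDefault());
    }

    public static void main(final String[] args) {
        System.out.println(cacheNameFor(null, "FedFoo"));        // ..._fedfoo
        System.out.println(cacheNameFor(null, "CheatingGraph")); // ..._cheatinggraph
        System.out.println(cacheNameFor("foo", "FedFoo"));       // an explicit, reused suffix still collides
    }
}
```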
913 | gchq/gaffer/2884/2881 | gchq | gaffer | https://github.com/gchq/Gaffer/issues/2881 | https://github.com/gchq/Gaffer/pull/2884 | https://github.com/gchq/Gaffer/pull/2884 | 1 | resolve | Problems with getOriginalSchema in Federated and Proxy Stores | Problems have been reported fetching schemas with Federated and Proxy Stores. The current implementation of these stores contain overridden methods for `getSchema` but not `getOriginalSchema`. It's likely that with certain combinations of graphs and stores, the `originalSchema` field could remain `null` or be outdated. This could cause problems with the getSchema operation when used directly on the store (not wrapped) and differences between using a `Store` directly and using it through a `Graph`.
Investigate and fix any problems with Federated and Proxy Store use of `getSchema`/`getOriginalSchema`. This might be as simple as adding overridden methods for `getOriginalSchema`.
**Additional context**
See #2791 for background.
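The fix recorded in the diff below routes schema retrieval through a Context and the GetSchema operation, while the no-argument getSchema()/getOriginalSchema() return a blank Schema and remain for compatibility only. A rough usage sketch with the Gaffer types named in the diff (the wrapper class and method are illustrative):

```java
import uk.gov.gchq.gaffer.federatedstore.FederatedStore;
import uk.gov.gchq.gaffer.operation.OperationException;
import uk.gov.gchq.gaffer.store.Context;
import uk.gov.gchq.gaffer.store.operation.GetSchema;
import uk.gov.gchq.gaffer.store.schema.Schema;
import uk.gov.gchq.gaffer.user.User;

// Usage sketch: fetch a FederatedStore schema with a Context via the GetSchema
// operation, rather than the no-argument getSchema()/getOriginalSchema().
public final class FederatedSchemaExample {

    static Schema mergedSchemaFor(final FederatedStore store, final User user) throws OperationException {
        final Context context = new Context(user);
        // compact(false) asks for the merged original schemas rather than the optimised compact form.
        return store.execute(new GetSchema.Builder().compact(false).build(), context);
    }
}
```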
| 5f22ae68b1bef77c398919b19a17c4b9f9e3c34f | 9ef3e70589b023d3d1b56e079091059d8ca7f44b | https://github.com/gchq/gaffer/compare/5f22ae68b1bef77c398919b19a17c4b9f9e3c34f...9ef3e70589b023d3d1b56e079091059d8ca7f44b | diff --git a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java
index 033b3b7c55..1b6aa96e16 100644
--- a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java
+++ b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java
@@ -27,17 +27,10 @@ import uk.gov.gchq.gaffer.cache.exception.CacheOperationException;
import uk.gov.gchq.gaffer.commonutil.JsonUtil;
import uk.gov.gchq.gaffer.commonutil.exception.OverwritingException;
import uk.gov.gchq.gaffer.commonutil.pair.Pair;
-import uk.gov.gchq.gaffer.core.exception.GafferRuntimeException;
-import uk.gov.gchq.gaffer.data.elementdefinition.exception.SchemaException;
import uk.gov.gchq.gaffer.federatedstore.exception.StorageException;
-import uk.gov.gchq.gaffer.federatedstore.operation.FederatedOperation;
import uk.gov.gchq.gaffer.graph.GraphConfig;
import uk.gov.gchq.gaffer.graph.GraphSerialisable;
-import uk.gov.gchq.gaffer.store.Context;
import uk.gov.gchq.gaffer.store.library.GraphLibrary;
-import uk.gov.gchq.gaffer.store.operation.GetSchema;
-import uk.gov.gchq.gaffer.store.schema.Schema;
-import uk.gov.gchq.gaffer.store.schema.Schema.Builder;
import uk.gov.gchq.gaffer.user.User;
import java.util.ArrayList;
@@ -234,35 +227,6 @@ public class FederatedGraphStorage {
return Collections.unmodifiableList(rtn);
}
- @Deprecated
- public Schema getSchema(final FederatedOperation<Void, Object> operation, final Context context) {
- if (null == context || null == context.getUser()) {
- // no user then return an empty schema
- return new Schema();
- }
- final List<String> graphIds = isNull(operation) ? null : operation.getGraphIds();
-
- final Stream<GraphSerialisable> graphs = getStream(context.getUser(), graphIds);
- final Builder schemaBuilder = new Builder();
- try {
- if (nonNull(operation) && operation.hasPayloadOperation() && operation.payloadInstanceOf(GetSchema.class) && ((GetSchema) operation.getPayloadOperation()).isCompact()) {
- graphs.forEach(gs -> {
- try {
- schemaBuilder.merge(gs.getGraph().execute((GetSchema) operation.getPayloadOperation(), context));
- } catch (final Exception e) {
- throw new GafferRuntimeException("Unable to fetch schema from graph " + gs.getGraphId(), e);
- }
- });
- } else {
- graphs.forEach(g -> schemaBuilder.merge(g.getSchema(graphLibrary)));
- }
- } catch (final SchemaException e) {
- final List<String> resultGraphIds = getStream(context.getUser(), graphIds).map(GraphSerialisable::getGraphId).collect(Collectors.toList());
- throw new SchemaException("Unable to merge the schemas for all of your federated graphs: " + resultGraphIds + ". You can limit which graphs to query for using the FederatedOperation.graphIds option.", e);
- }
- return schemaBuilder.build();
- }
-
private void validateAllGivenGraphIdsAreVisibleForUser(final User user, final Collection<String> graphIds, final String adminAuth) {
if (null != graphIds) {
final Collection<String> visibleIds = getAllIds(user, adminAuth);
diff --git a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java
index c880c5ad4c..70aa2e2d13 100644
--- a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java
+++ b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java
@@ -58,6 +58,7 @@ import uk.gov.gchq.gaffer.federatedstore.util.ApplyViewToElementsFunction;
import uk.gov.gchq.gaffer.federatedstore.util.MergeSchema;
import uk.gov.gchq.gaffer.graph.GraphSerialisable;
import uk.gov.gchq.gaffer.operation.Operation;
+import uk.gov.gchq.gaffer.operation.OperationException;
import uk.gov.gchq.gaffer.operation.impl.Validate;
import uk.gov.gchq.gaffer.operation.impl.add.AddElements;
import uk.gov.gchq.gaffer.operation.impl.function.Aggregate;
@@ -108,7 +109,6 @@ import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreProperties.IS_PUBL
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreProperties.STORE_CONFIGURED_GRAPHIDS;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreProperties.STORE_CONFIGURED_MERGE_FUNCTIONS;
import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.getCleanStrings;
-import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.getFederatedWrappedSchema;
import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.loadStoreConfiguredGraphIdsListFrom;
import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.loadStoreConfiguredMergeFunctionMapFrom;
@@ -324,34 +324,50 @@ public class FederatedStore extends Store {
}
/**
- * Get {@link Schema} for this FederatedStore
+ * This method exists for compatibility only. It will
+ * always return a blank {@link Schema}. Either use the
+ * {@link FederatedStore#getSchema} method and supply a
+ * {@link Context}, or ideally use the {@link GetSchema}
+ * operation instead.
*
- * @return schema
+ * @return {@link Schema} blank schema
*/
@Override
public Schema getSchema() {
- return getSchema((Context) null);
+ return getSchema(new Context(), true);
}
/**
- * Get {@link Schema} for this FederatedStore
+ * This method exists for compatibility only. It will
+ * always return a blank {@link Schema}. Either use the
+ * {@link FederatedStore#getSchema} method and supply a
+ * {@link Context}, or ideally use the {@link GetSchema}
+ * operation instead.
*
- * @param context context with User.
- * @return schema
+ * @return {@link Schema} blank schema
*/
- public Schema getSchema(final Context context) {
- return getSchema(getFederatedWrappedSchema(), context);
+ @Override
+ public Schema getOriginalSchema() {
+ return getSchema(new Context(), false);
}
/**
- * Get {@link Schema} for this FederatedStore
+ * Get {@link Schema} for this FederatedStore.
+ * <p>
+ * This will return a merged schema of the original schemas
+ * or the optimised compact schemas of the stores inside
+ * this FederatedStore.
*
- * @param operation operation with graphIds.
- * @param context context with User.
+ * @param context context with valid User
+ * @param getCompactSchema if true, gets the optimised compact schemas
* @return schema
*/
- public Schema getSchema(final FederatedOperation operation, final Context context) {
- return graphStorage.getSchema(operation, context);
+ public Schema getSchema(final Context context, final boolean getCompactSchema) {
+ try {
+ return execute(new GetSchema.Builder().compact(getCompactSchema).build(), context);
+ } catch (final OperationException e) {
+ throw new GafferRuntimeException("Unable to execute GetSchema Operation", e);
+ }
}
/**
@@ -589,8 +605,8 @@ public class FederatedStore extends Store {
final List<String> graphIds = new ArrayList<>(storeConfiguredGraphIds);
final List<String> federatedStoreSystemUser = getAllGraphIds(new User.Builder()
- .userId(FEDERATED_STORE_SYSTEM_USER)
- .opAuths(this.getProperties().getAdminAuth()).build(),
+ .userId(FEDERATED_STORE_SYSTEM_USER)
+ .opAuths(this.getProperties().getAdminAuth()).build(),
true);
graphIds.retainAll(federatedStoreSystemUser);
diff --git a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidator.java b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidator.java
index d7f8b9c851..c1bc7dfb5b 100644
--- a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidator.java
+++ b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2022 Crown Copyright
+ * Copyright 2016-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,13 +16,16 @@
package uk.gov.gchq.gaffer.federatedstore.operation;
+import uk.gov.gchq.gaffer.core.exception.GafferRuntimeException;
import uk.gov.gchq.gaffer.data.elementdefinition.view.View;
import uk.gov.gchq.gaffer.federatedstore.FederatedStore;
import uk.gov.gchq.gaffer.graph.GraphSerialisable;
import uk.gov.gchq.gaffer.operation.Operation;
+import uk.gov.gchq.gaffer.operation.OperationException;
import uk.gov.gchq.gaffer.store.Context;
import uk.gov.gchq.gaffer.store.Store;
import uk.gov.gchq.gaffer.store.StoreTrait;
+import uk.gov.gchq.gaffer.store.operation.GetSchema;
import uk.gov.gchq.gaffer.store.operation.OperationChainValidator;
import uk.gov.gchq.gaffer.store.schema.Schema;
import uk.gov.gchq.gaffer.store.schema.ViewValidator;
@@ -35,7 +38,6 @@ import java.util.List;
import java.util.stream.Collectors;
import static java.util.Objects.nonNull;
-import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.getFederatedWrappedSchema;
import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.shallowCloneWithDeepOptions;
/**
@@ -51,9 +53,13 @@ public class FederatedOperationChainValidator extends OperationChainValidator {
@Override
protected Schema getSchema(final Operation op, final User user, final Store store) {
- return (op instanceof FederatedOperation)
- ? ((FederatedStore) store).getSchema(getFederatedWrappedSchema().graphIds(((FederatedOperation) op).getGraphIds()), new Context(user))
- : ((FederatedStore) store).getSchema(getFederatedWrappedSchema(), new Context(user));
+ try {
+ return (op instanceof FederatedOperation)
+ ? store.execute(new FederatedOperation.Builder().<Void, Schema>op(new GetSchema()).graphIds(((FederatedOperation<?, ?>) op).getGraphIds()).build(), new Context(user))
+ : store.execute(new GetSchema(), new Context(user));
+ } catch (final OperationException e) {
+ throw new GafferRuntimeException("Unable to execute GetSchema Operation", e);
+ }
}
@Override
diff --git a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/FederatedDelegateToHandler.java b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/FederatedDelegateToHandler.java
index 7df3243ad5..fc55f365b3 100644
--- a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/FederatedDelegateToHandler.java
+++ b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/FederatedDelegateToHandler.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2022 Crown Copyright
+ * Copyright 2022-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -44,7 +44,7 @@ public class FederatedDelegateToHandler implements OutputOperationHandler<InputO
|| ValidateHandler.class.isAssignableFrom(handler.getClass())
|| AggregateHandler.class.isAssignableFrom(handler.getClass())) {
// Use the doOperation which requires a schema.
- return (Iterable<? extends Element>) ((OperationWithSchemaHandler) handler).doOperation(operation, ((FederatedStore) store).getSchema(context));
+ return (Iterable<? extends Element>) ((OperationWithSchemaHandler) handler).doOperation(operation, ((FederatedStore) store).getSchema(context, true));
} else {
return handler.doOperation(operation, context, store);
}
diff --git a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/util/FederatedStoreUtil.java b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/util/FederatedStoreUtil.java
index 5732b983b8..b36bb9101d 100644
--- a/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/util/FederatedStoreUtil.java
+++ b/store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/util/FederatedStoreUtil.java
@@ -280,11 +280,6 @@ public final class FederatedStoreUtil {
return deprecatedGraphIds;
}
- @Deprecated
- public static FederatedOperation<Void, Iterable<Schema>> getFederatedWrappedSchema() {
- return new FederatedOperation.Builder().<Void, Iterable<Schema>>op(new GetSchema()).build();
- }
-
/**
* Return a clone of the given operations with a deep clone of options.
* <p>
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorageTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorageTest.java
index af1054900d..e4499db2ab 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorageTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorageTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2022 Crown Copyright
+ * Copyright 2017-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -50,7 +50,6 @@ import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static uk.gov.gchq.gaffer.federatedstore.FederatedGraphStorage.GRAPH_IDS_NOT_VISIBLE;
@@ -60,9 +59,6 @@ import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.EDGES;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.ENTITIES;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.GRAPH_ID_ACCUMULO;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.STRING;
-import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.contextAuthUser;
-import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.contextBlankUser;
-import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.contextTestUser;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.loadAccumuloStoreProperties;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.resetForFederatedTests;
import static uk.gov.gchq.gaffer.store.TestTypes.DIRECTED_EITHER;
@@ -295,82 +291,6 @@ public class FederatedGraphStorageTest {
.withMessage(String.format(GRAPH_IDS_NOT_VISIBLE, singleton(X)));
}
- @Test
- @Deprecated // TODO FS move to FedSchema Tests, when getSchema is deleted
- public void shouldChangeSchemaWhenAddingGraphB() throws Exception {
- //given
- graphStorage.put(graphSerialisableA, auth1Access);
- final Schema schemaA = graphStorage.getSchema(null, contextTestUser());
- assertEquals(1, schemaA.getTypes().size());
- assertEquals(String.class, schemaA.getType(STRING + 1).getClazz());
- assertEquals(getEntityDefinition(1), schemaA.getElement(ENTITIES + 1));
- graphStorage.put(graphSerialisableB, auth1Access);
- final Schema schemaAB = graphStorage.getSchema(null, contextTestUser());
- assertNotEquals(schemaA, schemaAB);
- assertEquals(2, schemaAB.getTypes().size());
- assertEquals(String.class, schemaAB.getType(STRING + 1).getClazz());
- assertEquals(String.class, schemaAB.getType(STRING + 2).getClazz());
- assertEquals(getEntityDefinition(1), schemaAB.getElement(ENTITIES + 1));
- assertEquals(getEntityDefinition(2), schemaAB.getElement(ENTITIES + 2));
- }
-
-
- @Test
- @Deprecated // TODO FS move to FedSchema Tests, when getSchema is deleted
- public void shouldGetSchemaForOwningUser() throws Exception {
- graphStorage.put(graphSerialisableA, auth1Access);
- graphStorage.put(graphSerialisableB, new FederatedAccess(singleton(X), X));
- final Schema schema = graphStorage.getSchema(null, contextTestUser());
- assertNotEquals(2, schema.getTypes().size(), "Revealing hidden schema");
- assertEquals(1, schema.getTypes().size());
- assertEquals(String.class, schema.getType(STRING + 1).getClazz());
- assertEquals(getEntityDefinition(1), schema.getElement(ENTITIES + 1));
- }
-
- @Test
- @Deprecated // TODO FS move to FedSchema Tests, when getSchema is deleted
- public void shouldNotGetSchemaForOwningUserWhenBlockingReadAccessPredicateConfigured() throws Exception {
- graphStorage.put(graphSerialisableA, blockingReadAccess);
- graphStorage.put(graphSerialisableB, new FederatedAccess(singleton(X), X));
- final Schema schema = graphStorage.getSchema(null, contextTestUser());
- assertNotEquals(2, schema.getTypes().size(), "Revealing hidden schema");
- assertEquals(0, schema.getTypes().size(), "Revealing hidden schema");
- }
-
- @Test
- @Deprecated // TODO FS move to FedSchema Tests, when getSchema is deleted
- public void shouldGetSchemaForAuthUser() throws Exception {
- graphStorage.put(graphSerialisableA, auth1Access);
- graphStorage.put(graphSerialisableB, new FederatedAccess(singleton(X), X));
- final Schema schema = graphStorage.getSchema(null, contextAuthUser());
- assertNotEquals(2, schema.getTypes().size(), "Revealing hidden schema");
- assertEquals(1, schema.getTypes().size());
- assertEquals(String.class, schema.getType(STRING + 1).getClazz());
- assertEquals(getEntityDefinition(1), schema.getElement(ENTITIES + 1));
- }
-
- @Test
- @Deprecated // TODO FS move to FedSchema Tests, when getSchema is deleted
- public void shouldNotGetSchemaForBlankUser() throws Exception {
- graphStorage.put(graphSerialisableA, auth1Access);
- graphStorage.put(graphSerialisableB, new FederatedAccess(singleton(X), X));
- final Schema schema = graphStorage.getSchema(null, contextBlankUser());
- assertNotEquals(2, schema.getTypes().size(), "Revealing hidden schema");
- assertEquals(0, schema.getTypes().size(), "Revealing hidden schema");
- }
-
- @Test
- @Deprecated // TODO FS move to FedSchema Tests, when getSchema is deleted
- public void shouldGetSchemaForBlankUserWhenPermissiveReadAccessPredicateConfigured() throws Exception {
- graphStorage.put(graphSerialisableA, permissiveReadAccess);
- graphStorage.put(graphSerialisableB, new FederatedAccess(singleton(X), X));
- final Schema schema = graphStorage.getSchema(null, contextBlankUser());
- assertNotEquals(2, schema.getTypes().size(), "Revealing hidden schema");
- assertEquals(1, schema.getTypes().size());
- assertEquals(String.class, schema.getType(STRING + 1).getClazz());
- assertEquals(getEntityDefinition(1), schema.getElement(ENTITIES + 1));
- }
-
@Test
public void shouldRemoveForOwningUser() throws Exception {
//given
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreSchemaTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreSchemaTest.java
index 6eb7904ed9..38957ba4c4 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreSchemaTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreSchemaTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2022 Crown Copyright
+ * Copyright 2017-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,6 +19,9 @@ package uk.gov.gchq.gaffer.federatedstore;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import uk.gov.gchq.gaffer.access.predicate.AccessPredicate;
+import uk.gov.gchq.gaffer.access.predicate.NoAccessPredicate;
+import uk.gov.gchq.gaffer.access.predicate.UnrestrictedAccessPredicate;
import uk.gov.gchq.gaffer.accumulostore.AccumuloProperties;
import uk.gov.gchq.gaffer.data.element.Edge;
import uk.gov.gchq.gaffer.data.element.Element;
@@ -47,8 +50,10 @@ import uk.gov.gchq.koryphe.impl.predicate.IsEqual;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
+import java.util.Set;
import java.util.stream.Collectors;
+import static java.util.Collections.singleton;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.ACCUMULO_STORE_SINGLE_USE_PROPERTIES;
@@ -71,6 +76,9 @@ import static uk.gov.gchq.gaffer.federatedstore.FederatedStoreTestUtil.resetForF
import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.getDefaultMergeFunction;
import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.getFederatedOperation;
import static uk.gov.gchq.gaffer.store.TestTypes.DIRECTED_EITHER;
+import static uk.gov.gchq.gaffer.user.StoreUser.AUTH_1;
+import static uk.gov.gchq.gaffer.user.StoreUser.authUser;
+import static uk.gov.gchq.gaffer.user.StoreUser.blankUser;
import static uk.gov.gchq.gaffer.user.StoreUser.testUser;
public class FederatedStoreSchemaTest {
@@ -263,6 +271,133 @@ public class FederatedStoreSchemaTest {
.isTrue();
}
+ @Test
+ public void shouldGetSchemaWithOperationAndMethodWithContext() throws OperationException {
+ // Given
+ addGraphWith(GRAPH_ID_A, STRING_TYPE, PROPERTY_1);
+
+ // When
+ final Schema schemaFromOperation = federatedStore.execute(new GetSchema.Builder().build(), testContext);
+ final Schema schemaFromStore = federatedStore.getSchema(testContext, false);
+
+ // Then
+ assertThat(schemaFromOperation).isEqualTo(schemaFromStore);
+ }
+
+ @Test
+ public void shouldGetBlankSchemaWhenUsingDefaultMethod() throws OperationException {
+ // Given
+ addGraphWith(GRAPH_ID_A, STRING_TYPE, PROPERTY_1);
+
+ // When
+ final Schema schemaFromStoreMethod = federatedStore.getOriginalSchema(); // No Context, results in blank schema returned
+
+ // Then
+ assertThat(schemaFromStoreMethod).isEqualTo(new Schema());
+ }
+
+ @Test
+ public void shouldGetSchemaWhenUsingDefaultMethodWhenPermissiveReadAccessPredicateConfigured() throws OperationException {
+ // Given
+ addGraphWithContextAndAccess(GRAPH_ID_A, STRING_TYPE, GROUP_BASIC_EDGE, testContext, new UnrestrictedAccessPredicate(), PROPERTY_1);
+
+ // When
+ final Schema schemaFromStoreMethod = federatedStore.getOriginalSchema();
+
+ // Then
+ assertThat(schemaFromStoreMethod.getEdge(GROUP_BASIC_EDGE).getProperties()).contains(PROPERTY_1);
+ }
+
+ @Test
+ public void shouldChangeSchemaWhenAddingGraphB() throws OperationException {
+ // Given
+ addGraphWith(GRAPH_ID_A, STRING_TYPE, PROPERTY_1);
+
+ // When
+ final Schema schemaA = federatedStore.getSchema(testContext, false);
+
+ // Then
+ assertThat(schemaA.getTypes().size()).isEqualTo(2);
+ assertThat(schemaA.getType(STRING).getClazz()).isEqualTo(String.class);
+ assertThat(schemaA.getEdge(GROUP_BASIC_EDGE).getProperties().size()).isEqualTo(1);
+
+ // Given
+ addGraphWith(GRAPH_ID_B, STRING_REQUIRED_TYPE, PROPERTY_2);
+
+ // When
+ final Schema schemaAB = federatedStore.getSchema(testContext, false);
+
+ // Then
+ assertThat(schemaAB).isNotEqualTo(schemaA);
+ assertThat(schemaAB.getEdge(GROUP_BASIC_EDGE).getProperties()).contains(PROPERTY_2);
+ }
+
+ @Test
+ public void shouldGetSchemaForOwningUser() throws OperationException {
+ // Given
+ addGraphWith(GRAPH_ID_A, STRING_REQUIRED_TYPE, PROPERTY_1);
+ addGraphWithContextAndAuths(GRAPH_ID_B, STRING_TYPE, "hidden" + GROUP_BASIC_EDGE, singleton(AUTH_1), new Context(authUser()), PROPERTY_2);
+
+ // When
+ final Schema schemaFromOwningUser = federatedStore.getSchema(testContext, false);
+
+ // Then
+ assertThat(schemaFromOwningUser.getEdge("hidden" + GROUP_BASIC_EDGE)).withFailMessage("Revealing hidden schema").isNull();
+ assertThat(schemaFromOwningUser.getEdge(GROUP_BASIC_EDGE).getProperties()).contains(PROPERTY_1);
+ }
+
+ @Test
+ public void shouldNotGetSchemaForOwningUserWhenBlockingReadAccessPredicateConfigured() throws OperationException {
+ // Given
+ addGraphWithContextAndAccess(GRAPH_ID_A, STRING_TYPE, GROUP_BASIC_EDGE, testContext, new NoAccessPredicate(), PROPERTY_1);
+
+ // When
+ final Schema schemaFromOwningUser = federatedStore.getSchema(testContext, false);
+
+ // Then
+ assertThat(schemaFromOwningUser).withFailMessage("Revealing blocked schema, should be empty").isEqualTo(new Schema());
+ }
+
+ @Test
+ public void shouldGetSchemaForAuthUser() throws OperationException {
+ // Given
+ final User authUser = new User.Builder().userId("authUser2").opAuths(AUTH_1).build();
+ addGraphWithContextAndAuths(GRAPH_ID_B, STRING_TYPE, GROUP_BASIC_EDGE, singleton(AUTH_1), new Context(authUser()), PROPERTY_1);
+
+ // When
+ final Schema schemaFromAuthUser = federatedStore.getSchema(new Context(authUser), false);
+ final Schema schemaFromTestUser = federatedStore.getSchema(testContext, false);
+
+ // Then
+ assertThat(schemaFromTestUser.getEdge("hidden" + GROUP_BASIC_EDGE)).withFailMessage("Revealing hidden schema").isNull();
+ assertThat(schemaFromTestUser).withFailMessage("Revealing hidden schema, should be empty").isEqualTo(new Schema());
+ assertThat(schemaFromAuthUser.getEdge(GROUP_BASIC_EDGE).getProperties()).contains(PROPERTY_1);
+ }
+
+ @Test
+ public void shouldNotGetSchemaForBlankUser() throws OperationException {
+ // Given
+ addGraphWith(GRAPH_ID_A, STRING_REQUIRED_TYPE, PROPERTY_1);
+
+ // When
+ final Schema schemaFromBlankUser = federatedStore.getSchema(new Context(blankUser()), false);
+
+ // Then
+ assertThat(schemaFromBlankUser).withFailMessage("Revealing schema to blank user, should be empty").isEqualTo(new Schema());
+ }
+
+ @Test
+ public void shouldGetSchemaForBlankUserWhenPermissiveReadAccessPredicateConfigured() throws OperationException {
+ // Given
+ addGraphWithContextAndAccess(GRAPH_ID_A, STRING_TYPE, GROUP_BASIC_EDGE, testContext, new UnrestrictedAccessPredicate(), PROPERTY_1);
+
+ // When
+ final Schema schemaFromBlankUser = federatedStore.getSchema(new Context(blankUser()), false);
+
+ // Then
+ assertThat(schemaFromBlankUser.getEdge(GROUP_BASIC_EDGE).getProperties()).contains(PROPERTY_1);
+ }
+
@Test
public void shouldValidateCorrectlyWithOverlappingSchemasUsingDefaultMergeFunction() throws OperationException {
// Given
@@ -788,11 +923,11 @@ public class FederatedStoreSchemaTest {
addGraphWith(GRAPH_ID_B, stringSchema, PROPERTY_1, PROPERTY_2);
}
- private void addGraphWith(final String graphId, final Schema stringType, final String... property) throws OperationException {
- federatedStore.execute(new AddGraph.Builder()
+ private AddGraph.Builder getAddGraphBuilder(final String graphId, final Schema stringType, final String edgeGroup, final String... property) {
+ return new AddGraph.Builder()
.graphId(graphId)
.schema(new Schema.Builder()
- .edge(GROUP_BASIC_EDGE, new SchemaEdgeDefinition.Builder()
+ .edge(edgeGroup, new SchemaEdgeDefinition.Builder()
.source(STRING)
.destination(STRING)
.directed(DIRECTED_EITHER)
@@ -801,10 +936,28 @@ public class FederatedStoreSchemaTest {
.type(DIRECTED_EITHER, Boolean.class)
.merge(stringType)
.build())
- .storeProperties(STORE_PROPERTIES.clone())
+ .storeProperties(STORE_PROPERTIES.clone());
+ }
+
+ private void addGraphWith(final String graphId, final Schema stringType, final String... property) throws OperationException {
+ federatedStore.execute(getAddGraphBuilder(graphId, stringType, GROUP_BASIC_EDGE, property)
.build(), testContext);
}
+ private void addGraphWithContextAndAuths(final String graphId, final Schema stringType, final String edgeGroup, Set<String> graphAuths,
+ Context context, final String... property) throws OperationException {
+ federatedStore.execute(getAddGraphBuilder(graphId, stringType, edgeGroup, property)
+ .graphAuths(graphAuths.toArray(new String[0]))
+ .build(), context);
+ }
+
+ private void addGraphWithContextAndAccess(final String graphId, final Schema stringType, final String edgeGroup, Context context,
+ AccessPredicate read, final String... property) throws OperationException {
+ federatedStore.execute(getAddGraphBuilder(graphId, stringType, edgeGroup, property)
+ .readAccessPredicate(read)
+ .build(), context);
+ }
+
private void addEdgeBasicWith(final String destination, final Integer... propertyValues) throws OperationException {
federatedStore.execute(new AddElements.Builder()
.input(edgeBasicWith(destination, propertyValues))
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java
index 5da3e90a41..5873d7d286 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2022 Crown Copyright
+ * Copyright 2017-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -323,9 +323,9 @@ public class FederatedStoreTest {
public void shouldUpdateSchemaWhenNewGraphIsAdded() throws Exception {
// Given
addGraphWithPaths(ACC_ID_1, propertiesAlt, blankUserContext, SCHEMA_ENTITY_BASIC_JSON);
- final Schema before = store.getSchema(new Context(blankUser));
+ final Schema before = store.getSchema(new Context(blankUser), true);
addGraphWithPaths(ACC_ID_2, propertiesAlt, blankUserContext, SCHEMA_EDGE_BASIC_JSON);
- final Schema after = store.getSchema(new Context(blankUser));
+ final Schema after = store.getSchema(new Context(blankUser), true);
// Then
assertThat(before).isNotEqualTo(after);
}
@@ -335,15 +335,15 @@ public class FederatedStoreTest {
public void shouldUpdateSchemaWhenNewGraphIsRemoved() throws Exception {
// Given
addGraphWithPaths(ACC_ID_1, propertiesAlt, blankUserContext, SCHEMA_ENTITY_BASIC_JSON);
- final Schema was = store.getSchema(new Context(blankUser));
+ final Schema was = store.getSchema(new Context(blankUser), true);
addGraphWithPaths(ACC_ID_2, propertiesAlt, blankUserContext, SCHEMA_EDGE_BASIC_JSON);
- final Schema before = store.getSchema(new Context(blankUser));
+ final Schema before = store.getSchema(new Context(blankUser), true);
// When
store.remove(ACC_ID_2, blankUser);
- final Schema after = store.getSchema(new Context(blankUser));
+ final Schema after = store.getSchema(new Context(blankUser), true);
assertThat(before).isNotEqualTo(after);
assertThat(was).isEqualTo(after);
}
@@ -1160,8 +1160,8 @@ public class FederatedStoreTest {
final Iterable<? extends Element> elements = store
.execute(new GetAllElements.Builder()
.view(new View.Builder()
- .edges(store.getSchema(context).getEdgeGroups()) //here
- .entities(store.getSchema(context).getEntityGroups()) //here 59 -> 58
+ .edges(store.getSchema(context, true).getEdgeGroups()) //here
+ .entities(store.getSchema(context, true).getEntityGroups()) //here 59 -> 58
.build())
.build(), context);
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidatorTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidatorTest.java
index 13117e8053..fb5aa9793b 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidatorTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidatorTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2022 Crown Copyright
+ * Copyright 2017-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -29,6 +29,7 @@ import uk.gov.gchq.gaffer.operation.Operation;
import uk.gov.gchq.gaffer.operation.OperationException;
import uk.gov.gchq.gaffer.operation.impl.get.GetAllElements;
import uk.gov.gchq.gaffer.store.Context;
+import uk.gov.gchq.gaffer.store.operation.GetSchema;
import uk.gov.gchq.gaffer.store.schema.Schema;
import uk.gov.gchq.gaffer.store.schema.ViewValidator;
import uk.gov.gchq.gaffer.user.User;
@@ -37,16 +38,14 @@ import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.getFederatedOperation;
-import static uk.gov.gchq.gaffer.federatedstore.util.FederatedStoreUtil.getFederatedWrappedSchema;
public class FederatedOperationChainValidatorTest {
@Test
- public void shouldGetFederatedSchema() {
+ public void shouldGetFederatedSchema() throws OperationException {
// Given
final ViewValidator viewValidator = mock(FederatedViewValidator.class);
final FederatedOperationChainValidator validator = new FederatedOperationChainValidator(viewValidator);
@@ -54,18 +53,18 @@ public class FederatedOperationChainValidatorTest {
final User user = mock(User.class);
final Operation op = mock(Operation.class);
final Schema schema = mock(Schema.class);
- given(store.getSchema(eq(getFederatedWrappedSchema()), any(Context.class))).willReturn(schema);
+ given(store.execute(any(GetSchema.class), any(Context.class))).willReturn(schema);
// When
final Schema actualSchema = validator.getSchema(op, user, store);
- verify(store).getSchema(eq(getFederatedWrappedSchema()), any(Context.class));
+ verify(store).execute(any(GetSchema.class), any(Context.class));
// Then
assertEquals(schema, actualSchema);
}
@Test
- public void shouldNotErrorWithInvalidViewFromMissingGraph() throws OperationException {
+ public void shouldNotErrorWithInvalidViewFromMissingGraph() {
//given
String missingGraph = "missingGraph";
final Graph graph = new Graph.Builder()
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToAggregateHandlerTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToAggregateHandlerTest.java
index 598d672355..41e38f1181 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToAggregateHandlerTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToAggregateHandlerTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2022 Crown Copyright
+ * Copyright 2016-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -74,7 +74,7 @@ public class FederatedDelegateToAggregateHandlerTest {
@Mock final Schema schema)
throws OperationException {
// Given
- given(store.getSchema(context)).willReturn(schema);
+ given(store.getSchema(context, true)).willReturn(schema);
given(handler.doOperation(op, schema)).willReturn((Iterable) expectedResult);
final FederatedDelegateToHandler federatedHandler = new FederatedDelegateToHandler(handler);
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToFilterHandlerTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToFilterHandlerTest.java
index f456176e8a..41a50f0d6a 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToFilterHandlerTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToFilterHandlerTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2022 Crown Copyright
+ * Copyright 2016-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -42,7 +42,7 @@ public class FederatedDelegateToFilterHandlerTest {
final Iterable expectedResult = mock(Iterable.class);
final Schema schema = mock(Schema.class);
- given(store.getSchema(context)).willReturn(schema);
+ given(store.getSchema(context, true)).willReturn(schema);
given(handler.doOperation(op, schema)).willReturn(expectedResult);
final FederatedDelegateToHandler federatedHandler = new FederatedDelegateToHandler(handler);
@@ -53,6 +53,6 @@ public class FederatedDelegateToFilterHandlerTest {
// Then
assertSame(expectedResult, result);
verify(handler).doOperation(op, schema);
- verify(store).getSchema(context);
+ verify(store).getSchema(context, true);
}
}
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToTransHandlerTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToTransHandlerTest.java
index a9a5eaf3f0..3f8e2a2b07 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToTransHandlerTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToTransHandlerTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2022 Crown Copyright
+ * Copyright 2016-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -42,7 +42,7 @@ public class FederatedDelegateToTransHandlerTest {
final Iterable expectedResult = mock(Iterable.class);
final Schema schema = mock(Schema.class);
- given(store.getSchema(context)).willReturn(schema);
+ given(store.getSchema(context, true)).willReturn(schema);
given(handler.doOperation(op, schema)).willReturn(expectedResult);
final FederatedDelegateToHandler federatedHandler = new FederatedDelegateToHandler(handler);
@@ -53,6 +53,6 @@ public class FederatedDelegateToTransHandlerTest {
// Then
assertSame(expectedResult, result);
verify(handler).doOperation(op, schema);
- verify(store).getSchema(context);
+ verify(store).getSchema(context, true);
}
}
diff --git a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToValidateHandlerTest.java b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToValidateHandlerTest.java
index 22657ab93b..0cc2425353 100644
--- a/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToValidateHandlerTest.java
+++ b/store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToValidateHandlerTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2022 Crown Copyright
+ * Copyright 2016-2023 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -42,7 +42,7 @@ public class FederatedDelegateToValidateHandlerTest {
final Iterable expectedResult = mock(Iterable.class);
final Schema schema = mock(Schema.class);
- given(store.getSchema(context)).willReturn(schema);
+ given(store.getSchema(context, true)).willReturn(schema);
given(handler.doOperation(op, schema)).willReturn(expectedResult);
final FederatedDelegateToHandler federatedHandler = new FederatedDelegateToHandler(handler);
diff --git a/store-implementation/proxy-store/src/main/java/uk/gov/gchq/gaffer/proxystore/ProxyStore.java b/store-implementation/proxy-store/src/main/java/uk/gov/gchq/gaffer/proxystore/ProxyStore.java
index 81d27b2b2c..61026697b9 100644
--- a/store-implementation/proxy-store/src/main/java/uk/gov/gchq/gaffer/proxystore/ProxyStore.java
+++ b/store-implementation/proxy-store/src/main/java/uk/gov/gchq/gaffer/proxystore/ProxyStore.java
@@ -54,6 +54,7 @@ import uk.gov.gchq.gaffer.store.StoreException;
import uk.gov.gchq.gaffer.store.StoreProperties;
import uk.gov.gchq.gaffer.store.StoreTrait;
import uk.gov.gchq.gaffer.store.TypeReferenceStoreImpl;
+import uk.gov.gchq.gaffer.store.operation.GetSchema;
import uk.gov.gchq.gaffer.store.operation.GetTraits;
import uk.gov.gchq.gaffer.store.operation.handler.GetTraitsHandler;
import uk.gov.gchq.gaffer.store.operation.handler.OperationHandler;
@@ -86,8 +87,8 @@ import static java.util.Objects.nonNull;
*/
public class ProxyStore extends Store {
private static final Logger LOGGER = LoggerFactory.getLogger(ProxyStore.class);
+ public static final String ERROR_FETCHING_SCHEMA_FROM_REMOTE_STORE = "Error fetching schema from remote store.";
private Client client;
- private Schema schema;
public ProxyStore() {
super(false);
@@ -99,9 +100,8 @@ public class ProxyStore extends Store {
throws StoreException {
setProperties(properties);
client = createClient();
- schema = fetchSchema();
- super.initialise(graphId, schema, getProperties());
+ super.initialise(graphId, new Schema(), getProperties());
checkDelegateStoreStatus();
}
@@ -158,10 +158,38 @@ public class ProxyStore extends Store {
return newTraits;
}
- protected Schema fetchSchema() throws StoreException {
- final URL url = getProperties().getGafferUrl("graph/config/schema");
- final ResponseDeserialiser<Schema> responseDeserialiser = getResponseDeserialiserFor(new TypeReferenceStoreImpl.Schema());
- return doGet(url, responseDeserialiser, null);
+ protected Schema fetchSchema(final boolean getCompactSchema) throws OperationException {
+ final GetSchema.Builder getSchema = new GetSchema.Builder();
+ getSchema.compact(getCompactSchema);
+ return executeOpChainViaUrl(new OperationChain<>(getSchema.build()), new Context());
+ }
+
+ /**
+ * Get original {@link Schema} from the remote Store.
+ *
+ * @return original {@link Schema}
+ */
+ @Override
+ public Schema getOriginalSchema() {
+ try {
+ return fetchSchema(false);
+ } catch (final OperationException e) {
+ throw new GafferRuntimeException(ERROR_FETCHING_SCHEMA_FROM_REMOTE_STORE, e);
+ }
+ }
+
+ /**
+ * Get {@link Schema} from the remote Store.
+ *
+ * @return optimised compact {@link Schema}
+ */
+ @Override
+ public Schema getSchema() {
+ try {
+ return fetchSchema(true);
+ } catch (final OperationException e) {
+ throw new GafferRuntimeException(ERROR_FETCHING_SCHEMA_FROM_REMOTE_STORE, e);
+ }
}
@Override
diff --git a/store-implementation/proxy-store/src/test/java/uk/gov/gchq/gaffer/proxystore/integration/ProxyStoreBasicIT.java b/store-implementation/proxy-store/src/test/java/uk/gov/gchq/gaffer/proxystore/integration/ProxyStoreBasicIT.java
index 65eeb756cc..8f0455825f 100644
--- a/store-implementation/proxy-store/src/test/java/uk/gov/gchq/gaffer/proxystore/integration/ProxyStoreBasicIT.java
+++ b/store-implementation/proxy-store/src/test/java/uk/gov/gchq/gaffer/proxystore/integration/ProxyStoreBasicIT.java
@@ -50,7 +50,11 @@ import uk.gov.gchq.gaffer.proxystore.ProxyStore;
import uk.gov.gchq.gaffer.proxystore.SingleUseMapProxyStore;
import uk.gov.gchq.gaffer.rest.RestApiTestClient;
import uk.gov.gchq.gaffer.rest.service.v2.RestApiV2TestClient;
+import uk.gov.gchq.gaffer.store.Store;
import uk.gov.gchq.gaffer.store.StoreTrait;
+import uk.gov.gchq.gaffer.store.operation.GetSchema;
+import uk.gov.gchq.gaffer.store.schema.Schema;
+import uk.gov.gchq.gaffer.store.schema.SchemaOptimiser;
import uk.gov.gchq.gaffer.user.User;
import java.io.File;
@@ -98,6 +102,7 @@ public class ProxyStoreBasicIT {
.build()
};
+ private Store store;
private Graph graph;
@BeforeAll
@@ -116,13 +121,14 @@ public class ProxyStoreBasicIT {
CLIENT.reinitialiseGraph(testFolder, StreamUtil.SCHEMA, "map-store.properties");
// setup ProxyStore
+ store = new ProxyStore.Builder()
+ .graphId("graph1")
+ .host("localhost")
+ .port(8080)
+ .contextRoot("rest/v2")
+ .build();
graph = new Graph.Builder()
- .store(new ProxyStore.Builder()
- .graphId("graph1")
- .host("localhost")
- .port(8080)
- .contextRoot("rest/v2")
- .build())
+ .store(store)
.build();
}
@@ -237,4 +243,36 @@ public class ProxyStoreBasicIT {
.build();
graph.execute(add, USER);
}
+
+ @Test
+ public void shouldGetOriginalSchemaUsingMethodsAndOperation() throws OperationException {
+ // Given
+ Schema storeSchema = Schema.fromJson(StreamUtil.openStream(this.getClass(), StreamUtil.SCHEMA));
+
+ // When - Get
+ final Schema returnedSchemaFromGraphMethod = graph.getSchema(); // Indirectly runs getOriginalSchema
+ final Schema returnedSchemaFromStoreMethod = store.getOriginalSchema();
+ final Schema returnedSchemaFromOperation = graph.execute(new GetSchema(), USER);
+
+ // Then
+ assertThat(returnedSchemaFromGraphMethod).isEqualTo(storeSchema);
+ assertThat(returnedSchemaFromStoreMethod).isEqualTo(storeSchema);
+ assertThat(returnedSchemaFromOperation).isEqualTo(storeSchema);
+ }
+
+ @Test
+ public void shouldGetInternalOptimisedSchemaUsingMethodAndOperation() throws OperationException {
+ // Given
+ Schema storeSchema = Schema.fromJson(StreamUtil.openStream(this.getClass(), StreamUtil.SCHEMA));
+ Schema optimisedStoreSchema = new SchemaOptimiser().optimise(storeSchema, true);
+
+ // When - Get
+ final Schema returnedSchemaFromMethod = store.getSchema();
+ GetSchema getCompactSchema = new GetSchema.Builder().compact(true).build();
+ final Schema returnedSchemaFromOperation = graph.execute(getCompactSchema, USER);
+
+ // Then
+ assertThat(returnedSchemaFromMethod).isEqualTo(optimisedStoreSchema);
+ assertThat(returnedSchemaFromOperation).isEqualTo(optimisedStoreSchema);
+ }
} | ['store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToAggregateHandlerTest.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToValidateHandlerTest.java', 'store-implementation/proxy-store/src/test/java/uk/gov/gchq/gaffer/proxystore/integration/ProxyStoreBasicIT.java', 'store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorage.java', 'store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidator.java', 'store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/FederatedStore.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreSchemaTest.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedStoreTest.java', 'store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/FederatedDelegateToHandler.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToFilterHandlerTest.java', 'store-implementation/proxy-store/src/main/java/uk/gov/gchq/gaffer/proxystore/ProxyStore.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/FederatedGraphStorageTest.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/handler/impl/FederatedDelegateToTransHandlerTest.java', 'store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChainValidatorTest.java', 'store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/util/FederatedStoreUtil.java'] | {'.java': 15} | 15 | 15 | 0 | 0 | 15 | 4,037,751 | 833,176 | 107,678 | 1,103 | 8,218 | 1,690 | 151 | 6 | 742 | 103 | 150 | 7 | 0 | 0 | 2023-02-08T17:32:11 | 1,711 | Java | {'Java': 9333788, 'JavaScript': 2752310, 'Shell': 15069, 'HTML': 3332, 'CSS': 505} | Apache License 2.0 |
918 | gchq/gaffer/378/377 | gchq | gaffer | https://github.com/gchq/Gaffer/issues/377 | https://github.com/gchq/Gaffer/pull/378 | https://github.com/gchq/Gaffer/pull/378 | 1 | fixed | SplitTableTool ignores first split in file and only reads half the splits | The following code reads the first line, but then adds the next line to the set rather than the line it just read, so the first split and every alternate split are skipped:
```
String line = br.readLine();
while (line != null) {
splits.add(new Text(br.readLine()));
line = br.readLine();
}
```
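For reference, a minimal sketch of the corrected loop (mirroring the one-line fix in the diff below): the line that has just been read is the one added to the set before advancing.
```
// Context (from SplitTableTool): br is a BufferedReader over the splits file,
// and splits is the collection of org.apache.hadoop.io.Text split points.
String line = br.readLine();
while (line != null) {
    splits.add(new Text(line));   // add the line that was just read
    line = br.readLine();         // then advance to the next line
}
```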
| 315ac727537c72f71e529fd9fb06dc6d8bbb334f | bc5662ff4086657936e531782d51cb864aa6693a | https://github.com/gchq/gaffer/compare/315ac727537c72f71e529fd9fb06dc6d8bbb334f...bc5662ff4086657936e531782d51cb864aa6693a | diff --git a/accumulo-store/src/main/java/gaffer/accumulostore/operation/hdfs/handler/tool/SplitTableTool.java b/accumulo-store/src/main/java/gaffer/accumulostore/operation/hdfs/handler/tool/SplitTableTool.java
index 7ef29b307f..5e95e010b5 100644
--- a/accumulo-store/src/main/java/gaffer/accumulostore/operation/hdfs/handler/tool/SplitTableTool.java
+++ b/accumulo-store/src/main/java/gaffer/accumulostore/operation/hdfs/handler/tool/SplitTableTool.java
@@ -65,7 +65,7 @@ public class SplitTableTool extends Configured implements Tool {
new InputStreamReader(fs.open(new Path(operation.getInputPath())), CommonConstants.UTF_8))) {
String line = br.readLine();
while (line != null) {
- splits.add(new Text(br.readLine()));
+ splits.add(new Text(line));
line = br.readLine();
}
} catch (final IOException e) { | ['accumulo-store/src/main/java/gaffer/accumulostore/operation/hdfs/handler/tool/SplitTableTool.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,497,685 | 303,248 | 40,268 | 409 | 98 | 15 | 2 | 1 | 211 | 33 | 49 | 10 | 0 | 1 | 2016-08-19T08:35:15 | 1,711 | Java | {'Java': 9333788, 'JavaScript': 2752310, 'Shell': 15069, 'HTML': 3332, 'CSS': 505} | Apache License 2.0 |
914 | gchq/gaffer/2508/2421 | gchq | gaffer | https://github.com/gchq/Gaffer/issues/2421 | https://github.com/gchq/Gaffer/pull/2508 | https://github.com/gchq/Gaffer/pull/2508 | 1 | resolve | Some stores don't allow Elements as GetElements input | **_(Originally titled: Inconsistent behaviour between Accumulostore & Mapstore)_**
The GetElements operation in the AccumuloStore (and MockAccumuloStore) seemingly accepts input consisting of Edges, but MapStore (and SingleUseMapStore) don't; they insist on them being wrapped as EdgeSeeds. Admittedly the examples in the docs only show EntitySeed/EdgeSeed.
An exploration of this issue also revealed a couple of NullPointerExceptions. See https://github.com/gchq/Gaffer/tree/gh-2421-BugHunt
Further exploration found that this error happened with the Parquet store too.
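A minimal sketch of the two input styles being compared (group names and vertices here are illustrative, using the standard Gaffer builders); before the fix, only the seed-based form behaved consistently across stores:
```
import uk.gov.gchq.gaffer.data.element.Edge;
import uk.gov.gchq.gaffer.data.element.Entity;
import uk.gov.gchq.gaffer.operation.data.EdgeSeed;
import uk.gov.gchq.gaffer.operation.data.EntitySeed;
import uk.gov.gchq.gaffer.operation.impl.get.GetElements;

// Seed-based input: worked on all stores.
final GetElements withSeeds = new GetElements.Builder()
        .input(new EntitySeed("vertexA"),
               new EdgeSeed("vertexA", "vertexB", true))
        .build();

// Element-based input: accepted by the Accumulo store, but rejected (or
// failing with NullPointerExceptions) on the Map and Parquet stores.
final GetElements withElements = new GetElements.Builder()
        .input(new Entity("BasicEntity", "vertexA"),
               new Edge.Builder()
                       .group("BasicEdge")
                       .source("vertexA")
                       .dest("vertexB")
                       .directed(true)
                       .build())
        .build();
```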
| 1ea1d6c0a72816b33c0ae9cd5565fd34bf32b7b0 | 7fd5c96dad55a2676d3ab7c423b04cc24daef16a | https://github.com/gchq/gaffer/compare/1ea1d6c0a72816b33c0ae9cd5565fd34bf32b7b0...7fd5c96dad55a2676d3ab7c423b04cc24daef16a | diff --git a/integration-test/src/test/java/uk/gov/gchq/gaffer/integration/impl/GetElementsIT.java b/integration-test/src/test/java/uk/gov/gchq/gaffer/integration/impl/GetElementsIT.java
index 7e1c0f5a15..ba5e43a227 100644
--- a/integration-test/src/test/java/uk/gov/gchq/gaffer/integration/impl/GetElementsIT.java
+++ b/integration-test/src/test/java/uk/gov/gchq/gaffer/integration/impl/GetElementsIT.java
@@ -56,6 +56,8 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
import static org.assertj.core.api.Assertions.assertThat;
import static uk.gov.gchq.gaffer.operation.SeedMatching.SeedMatchingType;
@@ -546,7 +548,7 @@ public class GetElementsIT extends AbstractStoreIT {
viewBuilder.edge(TestGroups.EDGE);
}
- final GetElements op = new GetElements.Builder()
+ final GetElements opSeed = new GetElements.Builder()
.input(seeds)
.directedType(directedType)
.inOutType(inOutType)
@@ -554,11 +556,24 @@ public class GetElementsIT extends AbstractStoreIT {
.seedMatching(seedMatching)
.build();
+ Collection<ElementId> seedCollection = StreamSupport.stream(seeds.spliterator(), false)
+ .collect(Collectors.toList());
+
+ final GetElements opElement = new GetElements.Builder()
+ .input(getElements(seedCollection, null))
+ .directedType(directedType)
+ .inOutType(inOutType)
+ .view(viewBuilder.build())
+ .seedMatching(seedMatching)
+ .build();
+
// When
- final CloseableIterable<? extends Element> results = graph.execute(op, user);
+ final CloseableIterable<? extends Element> resultsSeed = graph.execute(opSeed, user);
+ final CloseableIterable<? extends Element> resultsElement = graph.execute(opElement, user);
// Then
- ElementUtil.assertElementEquals(expectedElements, results, true);
+ ElementUtil.assertElementEquals(expectedElements, resultsSeed, true);
+ ElementUtil.assertElementEquals(expectedElements, resultsElement, true);
}
private static Collection<Element> getElements(final Collection<ElementId> seeds, final Boolean direction) {
diff --git a/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java b/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java
index 40899f6d39..09c87c7953 100644
--- a/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java
+++ b/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java
@@ -22,8 +22,6 @@ import uk.gov.gchq.gaffer.data.element.Edge;
import uk.gov.gchq.gaffer.data.element.Element;
import uk.gov.gchq.gaffer.data.element.Entity;
import uk.gov.gchq.gaffer.data.element.GroupedProperties;
-import uk.gov.gchq.gaffer.data.element.id.EdgeId;
-import uk.gov.gchq.gaffer.data.element.id.EntityId;
import uk.gov.gchq.gaffer.mapstore.MapStore;
import uk.gov.gchq.gaffer.operation.OperationException;
import uk.gov.gchq.gaffer.operation.data.EdgeSeed;
@@ -146,20 +144,20 @@ public class AddElementsHandler implements OperationHandler<AddElements> {
private void updateElementIndex(final Element element, final MapImpl mapImpl) {
if (element instanceof Entity) {
final Entity entity = (Entity) element;
- final EntityId entityId = new EntitySeed(entity.getVertex());
- mapImpl.addIndex(entityId, element);
+ final EntitySeed entitySeed = new EntitySeed(entity.getVertex());
+ mapImpl.addIndex(entitySeed, element);
} else {
final Edge edge = (Edge) element;
- edge.setIdentifiers(edge.getSource(), edge.getDestination(), edge.isDirected(), EdgeId.MatchedVertex.SOURCE);
- final EntityId sourceEntityId = new EntitySeed(edge.getSource());
- mapImpl.addIndex(sourceEntityId, edge);
+ edge.setIdentifiers(edge.getSource(), edge.getDestination(), edge.isDirected(), EdgeSeed.MatchedVertex.SOURCE);
+ final EntitySeed sourceEntitySeed = new EntitySeed(edge.getSource());
+ mapImpl.addIndex(sourceEntitySeed, edge);
- final Edge destMatchedEdge = new Edge(edge.getGroup(), edge.getSource(), edge.getDestination(), edge.isDirected(), EdgeId.MatchedVertex.DESTINATION, edge.getProperties());
- final EntityId destinationEntityId = new EntitySeed(edge.getDestination());
- mapImpl.addIndex(destinationEntityId, destMatchedEdge);
+ final Edge destMatchedEdge = new Edge(edge.getGroup(), edge.getSource(), edge.getDestination(), edge.isDirected(), EdgeSeed.MatchedVertex.DESTINATION, edge.getProperties());
+ final EntitySeed destinationEntitySeed = new EntitySeed(edge.getDestination());
+ mapImpl.addIndex(destinationEntitySeed, destMatchedEdge);
- final EdgeId edgeId = new EdgeSeed(edge.getSource(), edge.getDestination(), edge.isDirected());
- mapImpl.addIndex(edgeId, edge);
+ final EdgeSeed edgeSeed = new EdgeSeed(edge.getSource(), edge.getDestination(), edge.isDirected());
+ mapImpl.addIndex(edgeSeed, edge);
}
}
}
diff --git a/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/GetElementsUtil.java b/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/GetElementsUtil.java
index 5da2c4236f..5984eaa663 100644
--- a/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/GetElementsUtil.java
+++ b/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/GetElementsUtil.java
@@ -99,12 +99,13 @@ public final class GetElementsUtil {
} else {
relevantElements = new HashSet<>();
- final EdgeId edgeId = (EdgeSeed) elementId;
+ final EdgeId edgeId = (EdgeId) elementId;
+
if (DirectedType.isEither(edgeId.getDirectedType())) {
relevantElements.addAll(mapImpl.lookup(new EdgeSeed(edgeId.getSource(), edgeId.getDestination(), false)));
relevantElements.addAll(mapImpl.lookup(new EdgeSeed(edgeId.getSource(), edgeId.getDestination(), true)));
} else {
- relevantElements.addAll(mapImpl.lookup(edgeId));
+ relevantElements.addAll(mapImpl.lookup(new EdgeSeed(edgeId.getSource(), edgeId.getDestination(), edgeId.getDirectedType())));
}
mapImpl.lookup(new EntitySeed(edgeId.getSource()))
diff --git a/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/MapImpl.java b/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/MapImpl.java
index 1b1a0eb2d8..6b20da41a1 100644
--- a/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/MapImpl.java
+++ b/store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/MapImpl.java
@@ -25,6 +25,8 @@ import uk.gov.gchq.gaffer.mapstore.MapStoreProperties;
import uk.gov.gchq.gaffer.mapstore.factory.MapFactory;
import uk.gov.gchq.gaffer.mapstore.factory.SimpleMapFactory;
import uk.gov.gchq.gaffer.mapstore.multimap.MultiMap;
+import uk.gov.gchq.gaffer.operation.data.EdgeSeed;
+import uk.gov.gchq.gaffer.operation.data.EntitySeed;
import uk.gov.gchq.gaffer.store.schema.Schema;
import uk.gov.gchq.gaffer.store.schema.SchemaElementDefinition;
import uk.gov.gchq.gaffer.store.util.AggregatorUtil;
@@ -202,12 +204,12 @@ public class MapImpl {
return Stream.concat(getAllAggElements(groups), getAllNonAggElements(groups));
}
- void addIndex(final EntityId entityId, final Element element) {
- entityIdToElements.put(entityId, element);
+ void addIndex(final EntitySeed entitySeed, final Element element) {
+ entityIdToElements.put(entitySeed, element);
}
- void addIndex(final EdgeId edgeId, final Element element) {
- edgeIdToElements.put(edgeId, element);
+ void addIndex(final EdgeSeed edgeSeed, final Element element) {
+ edgeIdToElements.put(edgeSeed, element);
}
boolean isMaintainIndex() {
diff --git a/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/query/QueryGenerator.java b/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/query/QueryGenerator.java
index b55dbd0a00..a7c360f4a7 100644
--- a/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/query/QueryGenerator.java
+++ b/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/query/QueryGenerator.java
@@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory;
import uk.gov.gchq.gaffer.commonutil.pair.Pair;
import uk.gov.gchq.gaffer.data.element.id.DirectedType;
+import uk.gov.gchq.gaffer.data.element.id.EdgeId;
import uk.gov.gchq.gaffer.data.element.id.ElementId;
import uk.gov.gchq.gaffer.data.element.id.EntityId;
import uk.gov.gchq.gaffer.data.elementdefinition.view.View;
@@ -32,8 +33,6 @@ import uk.gov.gchq.gaffer.exception.SerialisationException;
import uk.gov.gchq.gaffer.operation.Operation;
import uk.gov.gchq.gaffer.operation.OperationException;
import uk.gov.gchq.gaffer.operation.SeedMatching;
-import uk.gov.gchq.gaffer.operation.data.EdgeSeed;
-import uk.gov.gchq.gaffer.operation.data.EntitySeed;
import uk.gov.gchq.gaffer.operation.graph.SeededGraphFilters;
import uk.gov.gchq.gaffer.operation.impl.get.GetAllElements;
import uk.gov.gchq.gaffer.operation.impl.get.GetElements;
@@ -279,11 +278,11 @@ public class QueryGenerator {
} else {
column = ParquetStore.SOURCE;
}
- if (seed instanceof EntitySeed) {
- return new ParquetEntitySeed(seed, converter.gafferObjectToParquetObjects(column, ((EntitySeed) seed).getVertex()));
+ if (seed instanceof EntityId) {
+ return new ParquetEntitySeed(seed, converter.gafferObjectToParquetObjects(column, ((EntityId) seed).getVertex()));
} else {
- return converter.edgeIdToParquetObjects((EdgeSeed) seed);
+ return converter.edgeIdToParquetObjects((EdgeId) seed);
}
}
| ['store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/GetElementsUtil.java', 'store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/AddElementsHandler.java', 'integration-test/src/test/java/uk/gov/gchq/gaffer/integration/impl/GetElementsIT.java', 'store-implementation/map-store/src/main/java/uk/gov/gchq/gaffer/mapstore/impl/MapImpl.java', 'store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/query/QueryGenerator.java'] | {'.java': 5} | 5 | 5 | 0 | 0 | 5 | 4,513,605 | 931,459 | 118,926 | 1,211 | 3,430 | 705 | 46 | 4 | 581 | 69 | 133 | 9 | 1 | 0 | 2021-09-17T15:10:13 | 1,711 | Java | {'Java': 9333788, 'JavaScript': 2752310, 'Shell': 15069, 'HTML': 3332, 'CSS': 505} | Apache License 2.0 |
916 | gchq/gaffer/1922/1921 | gchq | gaffer | https://github.com/gchq/Gaffer/issues/1921 | https://github.com/gchq/Gaffer/pull/1922 | https://github.com/gchq/Gaffer/pull/1922 | 1 | fixed | ImportRDDOfElements on Parquet store fails when used on a HDFS cluster | The `ImportRDDOfElements` operation on the Parquet store passes all the unit tests, which run on a local file system, but fails when run against HDFS on a cluster. The line
fs.rename(new Path(tempDir + "/AddElementsFromRDDTemp/sorted_aggregated_new/"), new Path(newDataDir));
in `AddElementsFromRDD` fails to move the directory as intended: the sorted_aggregated_new directory ends up nested inside the new snapshot directory rather than its contents replacing it. This means that the group directories cannot be found when the partitions are calculated.
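A sketch of the workaround applied in the PR below: rather than renaming the whole directory (HDFS moves a renamed directory inside an existing destination directory), each file under the temporary directory is renamed into the new snapshot directory individually.
```
// fs is the Hadoop FileSystem; tempDir and newDataDir are as in AddElementsFromRDD.
final FileStatus[] files = fs.listStatus(new Path(tempDir + "/AddElementsFromRDDTemp/sorted_aggregated_new/"));
for (final FileStatus file : files) {
    fs.rename(file.getPath(), new Path(newDataDir, file.getPath().getName()));
}
```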
index d782452152..293e0f39b5 100644
--- a/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/handler/spark/AddElementsFromRDD.java
+++ b/store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/handler/spark/AddElementsFromRDD.java
@@ -170,11 +170,16 @@ public class AddElementsFromRDD {
// the replacement of the old data with the new data an atomic operation and ensures that a get operation
// against the store will not read the directory when only some of the data has been moved there).
final long snapshot = System.currentTimeMillis();
- final String newDataDir = store.getDataDir() + "/" + ParquetStore.getSnapshotPath(snapshot) + "-tmp";
+ final String newDataDir = store.getDataDir() + "/" + ParquetStore.getSnapshotPath(snapshot) + "-tmp/";
LOGGER.info("Moving aggregated and sorted data to new snapshot directory {}", newDataDir);
+ LOGGER.info("Making directory {}", newDataDir);
fs.mkdirs(new Path(newDataDir));
- fs.rename(new Path(tempDir + "/AddElementsFromRDDTemp/sorted_aggregated_new/"),
- new Path(newDataDir));
+ final FileStatus[] fss = fs.listStatus(new Path(tempDir + "/AddElementsFromRDDTemp/sorted_aggregated_new/"));
+ for (int i = 0; i < fss.length; i++) {
+ final Path destination = new Path(newDataDir, fss[i].getPath().getName());
+ fs.rename(fss[i].getPath(), destination);
+ LOGGER.info("Renamed {} to {}", fss[i].getPath(), destination);
+ }
// Move snapshot-tmp directory to snapshot
final String directoryWithoutTmp = newDataDir.substring(0, newDataDir.lastIndexOf("-tmp")); | ['store-implementation/parquet-store/src/main/java/uk/gov/gchq/gaffer/parquetstore/operation/handler/spark/AddElementsFromRDD.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 4,106,662 | 848,795 | 107,983 | 1,116 | 850 | 184 | 11 | 1 | 509 | 69 | 110 | 5 | 0 | 0 | 2018-10-19T14:32:22 | 1,711 | Java | {'Java': 9333788, 'JavaScript': 2752310, 'Shell': 15069, 'HTML': 3332, 'CSS': 505} | Apache License 2.0 |
563 | spring-cloud/spring-cloud-sleuth/1612/1610 | spring-cloud | spring-cloud-sleuth | https://github.com/spring-cloud/spring-cloud-sleuth/issues/1610 | https://github.com/spring-cloud/spring-cloud-sleuth/pull/1612 | https://github.com/spring-cloud/spring-cloud-sleuth/pull/1612#issuecomment-610465799 | 1 | fixes | For LoadBalancerFeignClient extensions, the calls fails due to "double load-balancing" | The issue is in the following code:
https://github.com/spring-cloud/spring-cloud-sleuth/blob/37645fbf9d3e50dc341f333cf8bfd35189594798/spring-cloud-sleuth-core/src/main/java/org/springframework/cloud/sleuth/instrument/web/client/feign/TraceFeignObjectWrapper.java#L85-L100
Instead of passing the delegate of the load-balanced client as the first argument when constructing `TraceLoadBalancerFeignClient` (which itself extends `LoadBalancerFeignClient`), the actual load-balanced client is passed.
This results in the call being load-balanced first by the `TraceLoadBalancerFeignClient` `super.execute(request, options)` call, which already resolves the `clientId` to a correct host IP and then again by that load-balanced client passed as the delegate, which fails because it cannot resolve a host IP from what is already a host IP. | fa6eb209141b5dde8ade17762d27285d0d790309 | 20e29f2d805f8d90091039c900ffa11eb17cc6d4 | https://github.com/spring-cloud/spring-cloud-sleuth/compare/fa6eb209141b5dde8ade17762d27285d0d790309...20e29f2d805f8d90091039c900ffa11eb17cc6d4 | diff --git a/spring-cloud-sleuth-core/src/main/java/org/springframework/cloud/sleuth/instrument/web/client/feign/TraceLoadBalancerFeignClient.java b/spring-cloud-sleuth-core/src/main/java/org/springframework/cloud/sleuth/instrument/web/client/feign/TraceLoadBalancerFeignClient.java
index d27656fb3..8644610e7 100644
--- a/spring-cloud-sleuth-core/src/main/java/org/springframework/cloud/sleuth/instrument/web/client/feign/TraceLoadBalancerFeignClient.java
+++ b/spring-cloud-sleuth-core/src/main/java/org/springframework/cloud/sleuth/instrument/web/client/feign/TraceLoadBalancerFeignClient.java
@@ -30,6 +30,7 @@ import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.cloud.netflix.ribbon.SpringClientFactory;
+import org.springframework.cloud.openfeign.loadbalancer.FeignBlockingLoadBalancerClient;
import org.springframework.cloud.openfeign.ribbon.CachingSpringLoadBalancerFactory;
import org.springframework.cloud.openfeign.ribbon.LoadBalancerFeignClient;
@@ -67,7 +68,12 @@ public class TraceLoadBalancerFeignClient extends LoadBalancerFeignClient {
Response response = null;
Span fallbackSpan = tracer().nextSpan().start();
try {
- response = super.execute(request, options);
+ if (delegateIsALoadBalancer()) {
+ response = getDelegate().execute(request, options);
+ }
+ else {
+ response = super.execute(request, options);
+ }
if (log.isDebugEnabled()) {
log.debug("After receive");
}
@@ -95,6 +101,11 @@ public class TraceLoadBalancerFeignClient extends LoadBalancerFeignClient {
}
}
+ private boolean delegateIsALoadBalancer() {
+ return getDelegate() instanceof LoadBalancerFeignClient
+ || getDelegate() instanceof FeignBlockingLoadBalancerClient;
+ }
+
private Tracer tracer() {
if (this.tracer == null) {
this.tracer = this.beanFactory.getBean(Tracer.class);
diff --git a/tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125/ManuallyCreatedLoadBalancerFeignClientTests.java b/tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125/ManuallyCreatedLoadBalancerFeignClientTests.java
index 2d60c1d3c..b26ecce6b 100644
--- a/tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125/ManuallyCreatedLoadBalancerFeignClientTests.java
+++ b/tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125/ManuallyCreatedLoadBalancerFeignClientTests.java
@@ -59,10 +59,10 @@ import static org.assertj.core.api.BDDAssertions.then;
public class ManuallyCreatedLoadBalancerFeignClientTests {
@Autowired
- MyClient myClient;
+ MyLoadBalancerClient myLoadBalancerClient;
@Autowired
- MyNameRemote myNameRemote;
+ AnnotatedFeignClient annotatedFeignClient;
@Autowired
ArrayListSpanReporter reporter;
@@ -74,29 +74,29 @@ public class ManuallyCreatedLoadBalancerFeignClientTests {
@Test
public void should_reuse_custom_feign_client() {
- String response = this.myNameRemote.get();
+ String response = this.annotatedFeignClient.get();
- then(this.myClient.wasCalled()).isTrue();
+ then(this.myLoadBalancerClient.wasCalled()).isTrue();
then(response).isEqualTo("foo");
List<Span> spans = this.reporter.getSpans();
// retries
then(spans).hasSize(1);
- then(spans.get(0).tags().get("http.path")).isEqualTo("/");
+ then(spans.get(0).tags().get("http.path")).isEqualTo("/test");
}
@Test
public void my_client_called() {
- this.myNameRemote.get();
- then(this.myClient.wasCalled()).isTrue();
+ this.annotatedFeignClient.get();
+ then(this.myLoadBalancerClient.wasCalled()).isTrue();
}
@Test
public void span_captured() {
- this.myNameRemote.get();
+ this.annotatedFeignClient.get();
List<Span> spans = this.reporter.getSpans();
// retries
then(spans).hasSize(1);
- then(spans.get(0).tags().get("http.path")).isEqualTo("/");
+ then(spans.get(0).tags().get("http.path")).isEqualTo("/test");
}
}
@@ -109,7 +109,8 @@ class Application {
@Bean
public Client client(CachingSpringLoadBalancerFactory cachingFactory,
SpringClientFactory clientFactory) {
- return new MyClient(new MyDelegateClient(), cachingFactory, clientFactory);
+ return new MyLoadBalancerClient(new MyDelegateClient(), cachingFactory,
+ clientFactory);
}
@Bean
@@ -124,9 +125,10 @@ class Application {
}
-class MyClient extends LoadBalancerFeignClient {
+class MyLoadBalancerClient extends LoadBalancerFeignClient {
- MyClient(Client delegate, CachingSpringLoadBalancerFactory lbClientFactory,
+ MyLoadBalancerClient(Client delegate,
+ CachingSpringLoadBalancerFactory lbClientFactory,
SpringClientFactory clientFactory) {
super(delegate, lbClientFactory, clientFactory);
}
@@ -161,9 +163,9 @@ class MyDelegateClient implements Client {
}
@FeignClient(name = "foo", url = "http://foo")
-interface MyNameRemote {
+interface AnnotatedFeignClient {
- @RequestMapping(value = "/", method = RequestMethod.GET)
+ @RequestMapping(value = "/test", method = RequestMethod.GET)
String get();
}
diff --git a/tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125delegates/ManuallyCreatedDelegateLoadBalancerFeignClientTests.java b/tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125delegates/ManuallyCreatedDelegateLoadBalancerFeignClientTests.java
index b3a0f8a61..d28b39527 100644
--- a/tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125delegates/ManuallyCreatedDelegateLoadBalancerFeignClientTests.java
+++ b/tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125delegates/ManuallyCreatedDelegateLoadBalancerFeignClientTests.java
@@ -65,13 +65,13 @@ import static org.assertj.core.api.BDDAssertions.then;
public class ManuallyCreatedDelegateLoadBalancerFeignClientTests {
@Autowired
- MyClient myClient;
+ MyLoadBalancerClient myLoadBalancerClient;
@Autowired
MyDelegateClient myDelegateClient;
@Autowired
- MyNameRemote myNameRemote;
+ AnnotatedFeignClient annotatedFeignClient;
@Autowired
ArrayListSpanReporter reporter;
@@ -83,31 +83,31 @@ public class ManuallyCreatedDelegateLoadBalancerFeignClientTests {
@Test
public void should_reuse_custom_feign_client() {
- String response = this.myNameRemote.get();
+ String response = this.annotatedFeignClient.get();
- then(this.myClient.wasCalled()).isTrue();
+ then(this.myLoadBalancerClient.wasCalled()).isTrue();
then(this.myDelegateClient.wasCalled()).isTrue();
then(response).isEqualTo("foo");
List<Span> spans = this.reporter.getSpans();
// retries
then(spans).hasSize(1);
- then(spans.get(0).tags().get("http.path")).isEqualTo("/");
+ then(spans.get(0).tags().get("http.path")).isEqualTo("/test");
}
@Test
public void my_client_called() {
- this.myNameRemote.get();
- then(this.myClient.wasCalled()).isTrue();
+ this.annotatedFeignClient.get();
+ then(this.myLoadBalancerClient.wasCalled()).isTrue();
then(this.myDelegateClient.wasCalled()).isTrue();
}
@Test
public void span_captured() {
- this.myNameRemote.get();
+ this.annotatedFeignClient.get();
List<Span> spans = this.reporter.getSpans();
// retries
then(spans).hasSize(1);
- then(spans.get(0).tags().get("http.path")).isEqualTo("/");
+ then(spans.get(0).tags().get("http.path")).isEqualTo("/test");
}
}
@@ -126,15 +126,15 @@ class Application {
public Client client(MyDelegateClient myDelegateClient,
CachingSpringLoadBalancerFactory cachingFactory,
SpringClientFactory clientFactory) {
- return new MyClient(myDelegateClient, cachingFactory, clientFactory);
+ return new MyLoadBalancerClient(myDelegateClient, cachingFactory, clientFactory);
}
@Bean
- public MyNameRemote myNameRemote(Client client, Decoder decoder, Encoder encoder,
+ public AnnotatedFeignClient annotatedFeignClient(Client client, Decoder decoder, Encoder encoder,
Contract contract) {
return Feign.builder().client(client).encoder(encoder).decoder(decoder)
.contract(contract)
- .target(new HardCodedTarget<>(MyNameRemote.class, "foo", "http://foo"));
+ .target(new HardCodedTarget<>(AnnotatedFeignClient.class, "foo", "http://foo"));
}
@Bean
@@ -149,9 +149,9 @@ class Application {
}
-class MyClient extends LoadBalancerFeignClient {
+class MyLoadBalancerClient extends LoadBalancerFeignClient {
- MyClient(Client delegate, CachingSpringLoadBalancerFactory lbClientFactory,
+ MyLoadBalancerClient(Client delegate, CachingSpringLoadBalancerFactory lbClientFactory,
SpringClientFactory clientFactory) {
super(delegate, lbClientFactory, clientFactory);
}
@@ -190,9 +190,9 @@ class MyDelegateClient implements Client {
}
@FeignClient(name = "foo", url = "http://foo")
-interface MyNameRemote {
+interface AnnotatedFeignClient {
- @RequestMapping(value = "/", method = RequestMethod.GET)
+ @RequestMapping(value = "/test", method = RequestMethod.GET)
String get();
} | ['tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125delegates/ManuallyCreatedDelegateLoadBalancerFeignClientTests.java', 'spring-cloud-sleuth-core/src/main/java/org/springframework/cloud/sleuth/instrument/web/client/feign/TraceLoadBalancerFeignClient.java', 'tests/spring-cloud-sleuth-instrumentation-feign-tests/src/test/java/org/springframework/cloud/sleuth/instrument/feign/issues/issue1125/ManuallyCreatedLoadBalancerFeignClientTests.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 689,891 | 150,801 | 22,248 | 210 | 480 | 103 | 13 | 1 | 829 | 84 | 199 | 7 | 1 | 0 | 2020-04-07T15:47:33 | 1,703 | Java | {'Java': 3268067, 'JavaScript': 107117, 'CSS': 37863, 'Kotlin': 13342, 'HTML': 3070, 'Shell': 1787} | Apache License 2.0 |
557 | embulk/embulk/85/55 | embulk | embulk | https://github.com/embulk/embulk/issues/55 | https://github.com/embulk/embulk/pull/85 | https://github.com/embulk/embulk/pull/85 | 1 | fixes | Add filter process in preview(dry-run) mode. | Currently the preview command runs the input stage only.
I would like to be able to check the result after the filters have been applied.
https://twitter.com/frsyuki/status/563553691475533826
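For context, a simplified sketch of how the linked PR wires this up (variable names follow the PreviewExecutor changes in the diff below): the sampling output used by preview is wrapped by the configured filter plugins before the input plugin runs, so the sampled rows reflect the post-filter schema.
```
// Simplified from the PR's PreviewExecutor.doPreview.
PageOutput out = new SamplingPageOutput(task.getSampleRows(), filteredSchema);
try {
    out = Filters.open(filterPlugins, filterTasks, filterSchemas, out);
    input.run(inputTask, filteredSchema, 0, out);
} finally {
    out.close();
}
```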
| 3466c287e68a33fa4c02a5f94748655a17281bcb | 163346f8231e022a23ee337e9c720ec6cddc416a | https://github.com/embulk/embulk/compare/3466c287e68a33fa4c02a5f94748655a17281bcb...163346f8231e022a23ee337e9c720ec6cddc416a | diff --git a/embulk-core/src/main/java/org/embulk/exec/PreviewExecutor.java b/embulk-core/src/main/java/org/embulk/exec/PreviewExecutor.java
index 24200b9e..fe1a4cec 100644
--- a/embulk-core/src/main/java/org/embulk/exec/PreviewExecutor.java
+++ b/embulk-core/src/main/java/org/embulk/exec/PreviewExecutor.java
@@ -18,9 +18,11 @@ import org.embulk.spi.Page;
import org.embulk.spi.PageOutput;
import org.embulk.spi.PageReader;
import org.embulk.spi.InputPlugin;
+import org.embulk.spi.FilterPlugin;
import org.embulk.spi.Exec;
import org.embulk.spi.ExecSession;
import org.embulk.spi.ExecAction;
+import org.embulk.spi.util.Filters;
public class PreviewExecutor
{
@@ -34,6 +36,11 @@ public class PreviewExecutor
@NotNull
public ConfigSource getInputConfig();
+ @Config("filters")
+ @ConfigDefault("[]")
+ public List<ConfigSource> getFilterConfigs();
+
+ // TODO take preview_sample_rows from exec: config
@Config("preview_sample_rows")
@ConfigDefault("15")
public int getSampleRows();
@@ -69,19 +76,38 @@ public class PreviewExecutor
return Exec.newPlugin(InputPlugin.class, task.getInputConfig().get(PluginType.class, "type"));
}
+ protected List<FilterPlugin> newFilterPlugins(PreviewTask task)
+ {
+ return Filters.newFilterPlugins(Exec.session(), task.getFilterConfigs());
+ }
+
private PreviewResult doPreview(ConfigSource config)
{
final PreviewTask task = config.loadConfig(PreviewTask.class);
- InputPlugin input = newInputPlugin(task);
+ final InputPlugin input = newInputPlugin(task);
+ final List<FilterPlugin> filterPlugins = newFilterPlugins(task);
try {
input.transaction(task.getInputConfig(), new InputPlugin.Control() {
- public List<CommitReport> run(TaskSource taskSource, Schema schema, int processorCount)
+ public List<CommitReport> run(final TaskSource inputTask, Schema inputSchema, int processorCount)
{
- InputPlugin input = newInputPlugin(task);
- try (SamplingPageOutput out = new SamplingPageOutput(task.getSampleRows(), schema)) {
- input.run(taskSource, schema, 0, out);
- }
+ Filters.transaction(filterPlugins, task.getFilterConfigs(), inputSchema, new Filters.Control() {
+ public void run(final List<TaskSource> filterTasks, final List<Schema> filterSchemas)
+ {
+ InputPlugin input = newInputPlugin(task);
+ List<FilterPlugin> filterPlugins = newFilterPlugins(task);
+ Schema filteredSchema = filterSchemas.get(filterSchemas.size() - 1);
+
+ PageOutput out = new SamplingPageOutput(task.getSampleRows(), filteredSchema);
+ try {
+ out = Filters.open(filterPlugins, filterTasks, filterSchemas, out);
+ input.run(inputTask, filteredSchema, 0, out);
+ } finally {
+ out.close();
+ }
+ }
+ });
+ // program never reaches here because SamplingPageOutput.finish throws an error.
throw new NoSampleException("No input records to preview");
}
});
@@ -130,8 +156,11 @@ public class PreviewExecutor
@Override
public void close()
{
- for (Page page : pages) {
- page.release();
+ if (pages != null) {
+ for (Page page : pages) {
+ page.release();
+ }
+ pages = null;
}
}
} | ['embulk-core/src/main/java/org/embulk/exec/PreviewExecutor.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 284,683 | 54,908 | 9,474 | 126 | 2,421 | 416 | 45 | 1 | 151 | 17 | 36 | 5 | 1 | 0 | 2015-02-18T20:45:39 | 1,692 | Java | {'Java': 1206535, 'Ruby': 102994, 'Batchfile': 4257, 'Shell': 3118, 'HTML': 538} | Apache License 2.0 |
552 | embulk/embulk/1034/1033 | embulk | embulk | https://github.com/embulk/embulk/issues/1033 | https://github.com/embulk/embulk/pull/1034 | https://github.com/embulk/embulk/pull/1034 | 1 | fix | Embulk v0.9.7 can't parse milliseconds part in `%s.%N` and `%s.%L` format. |
Reported by [ntrgiang](https://github.com/embulk/embulk/issues/467#issuecomment-416636390)
Embulk v0.9.7 can't parse milliseconds part in `%s.%N` and `%s.%L` format.
Embulk v0.8.39 can parse it.
[Reporduce code](https://github.com/hiroyuki-sato/embulk-support/tree/master/core_467_timestamp_format/ntrgiang)
```
2018-08-29 19:05:16.852 +0900: Embulk v0.8.39
+-----------------+-----------------------------+------------+-------------+
| key_name:string | day:timestamp | value:long | name:string |
+-----------------+-----------------------------+------------+-------------+
| 2 new_clients | 2018-04-06 11:06:02.001 UTC | 1 | a |
| 3 new_clients | 2018-04-06 11:06:03.001 UTC | 2 | b |
| 4 new_clients | 2018-04-06 11:06:04.001 UTC | 3 | c |
+-----------------+-----------------------------+------------+-------------+
```
```
2018-08-29 19:05:50.172 +0900: Embulk v0.9.7
+-----------------+-------------------------+------------+-------------+
| key_name:string | day:timestamp | value:long | name:string |
+-----------------+-------------------------+------------+-------------+
| 2 new_clients | 2018-04-06 11:06:02 UTC | 1 | a |
| 3 new_clients | 2018-04-06 11:06:03 UTC | 2 | b |
| 4 new_clients | 2018-04-06 11:06:04 UTC | 3 | c |
+-----------------+-------------------------+------------+-------------+
``` | 81ca98f231b7c18ea3ee77dd76c6ebcb8a65ea0c | fc02f7e79d97dc3cba42e20e4645298d245b14c2 | https://github.com/embulk/embulk/compare/81ca98f231b7c18ea3ee77dd76c6ebcb8a65ea0c...fc02f7e79d97dc3cba42e20e4645298d245b14c2 | diff --git a/embulk-core/src/main/java/org/embulk/spi/time/RubyTimeParsed.java b/embulk-core/src/main/java/org/embulk/spi/time/RubyTimeParsed.java
index 8f88293d..c486254e 100644
--- a/embulk-core/src/main/java/org/embulk/spi/time/RubyTimeParsed.java
+++ b/embulk-core/src/main/java/org/embulk/spi/time/RubyTimeParsed.java
@@ -586,20 +586,37 @@ class RubyTimeParsed extends TimeParsed {
}
if (this.instantSeconds != null) {
- // Fractions by %Q are prioritized over fractions by %N.
- // irb(main):002:0> Time.strptime("123456789 12.345", "%Q %S.%N").nsec
- // => 789000000
- // irb(main):003:0> Time.strptime("12.345 123456789", "%S.%N %Q").nsec
- // => 789000000
- // irb(main):004:0> Time.strptime("12.345", "%S.%N").nsec
- // => 345000000
if (!defaultZoneOffset.equals(ZoneOffset.UTC)) {
// TODO: Warn that a default time zone is specified for epoch seconds.
}
if (this.timeZoneName != null) {
// TODO: Warn that the epoch second has a time zone.
}
- return this.instantSeconds;
+
+ // The fraction part is "added" to the epoch second in case both are specified.
+ // irb(main):002:0> Time.strptime("1500000000.123456789", "%s.%N").nsec
+ // => 123456789
+ // irb(main):003:0> Time.strptime("1500000000456.111111111", "%Q.%N").nsec
+ // => 567111111
+ //
+ // If "sec_fraction" is specified, the value is used like |Time.at(seconds, sec_fraction * 1000000)|.
+ // https://svn.ruby-lang.org/cgi-bin/viewvc.cgi/tags/v2_3_1/lib/time.rb?view=markup#l427
+ //
+ // |Time.at| adds "seconds" (the epoch) and "sec_fraction" (the fraction part) with scaling.
+ // https://svn.ruby-lang.org/cgi-bin/viewvc.cgi/tags/v2_3_1/time.c?view=markup#l2528
+ //
+ // It behaves the same even if "seconds" is specified as a Rational, not an Integer.
+ // irb(main):004:0> Time.at(Rational(1500000000789, 1000), 100123).nsec
+ // => 889123000
+ if (this.nanoOfSecond != Integer.MIN_VALUE) {
+ if (this.instantSeconds.getEpochSecond() >= 0) {
+ return this.instantSeconds.plusNanos(this.nanoOfSecond);
+ } else {
+ return this.instantSeconds.minusNanos(this.nanoOfSecond);
+ }
+ } else {
+ return this.instantSeconds;
+ }
}
// Day of the year (yday: DAY_OF_YEAR) is not considered in Time.strptime, not like DateTime.strptime.
@@ -643,20 +660,37 @@ class RubyTimeParsed extends TimeParsed {
final int defaultDayOfMonth,
final ZoneId defaultZoneId) {
if (this.instantSeconds != null) {
- // Fractions by %Q are prioritized over fractions by %N.
- // irb(main):002:0> Time.strptime("123456789 12.345", "%Q %S.%N").nsec
- // => 789000000
- // irb(main):003:0> Time.strptime("12.345 123456789", "%S.%N %Q").nsec
- // => 789000000
- // irb(main):004:0> Time.strptime("12.345", "%S.%N").nsec
- // => 345000000
if (!defaultZoneId.equals(ZoneOffset.UTC)) {
// TODO: Warn that a default time zone is specified for epoch seconds.
}
if (this.timeZoneName != null) {
// TODO: Warn that the epoch second has a time zone.
}
- return this.instantSeconds;
+
+ // The fraction part is "added" to the epoch second in case both are specified.
+ // irb(main):002:0> Time.strptime("1500000000.123456789", "%s.%N").nsec
+ // => 123456789
+ // irb(main):003:0> Time.strptime("1500000000456.111111111", "%Q.%N").nsec
+ // => 567111111
+ //
+ // If "sec_fraction" is specified, the value is used like |Time.at(seconds, sec_fraction * 1000000)|.
+ // https://svn.ruby-lang.org/cgi-bin/viewvc.cgi/tags/v2_3_1/lib/time.rb?view=markup#l427
+ //
+ // |Time.at| adds "seconds" (the epoch) and "sec_fraction" (the fraction part) with scaling.
+ // https://svn.ruby-lang.org/cgi-bin/viewvc.cgi/tags/v2_3_1/time.c?view=markup#l2528
+ //
+ // It behaves the same even if "seconds" is specified as a Rational, not an Integer.
+ // irb(main):004:0> Time.at(Rational(1500000000789, 1000), 100123).nsec
+ // => 889123000
+ if (this.nanoOfSecond != Integer.MIN_VALUE) {
+ if (this.instantSeconds.getEpochSecond() >= 0) {
+ return this.instantSeconds.plusNanos(this.nanoOfSecond);
+ } else {
+ return this.instantSeconds.minusNanos(this.nanoOfSecond);
+ }
+ } else {
+ return this.instantSeconds;
+ }
}
final ZoneId zoneId;
diff --git a/embulk-core/src/test/java/org/embulk/spi/time/TestTimestampParser.java b/embulk-core/src/test/java/org/embulk/spi/time/TestTimestampParser.java
index db3e8f6c..1cca67bf 100644
--- a/embulk-core/src/test/java/org/embulk/spi/time/TestTimestampParser.java
+++ b/embulk-core/src/test/java/org/embulk/spi/time/TestTimestampParser.java
@@ -624,6 +624,32 @@ public class TestTimestampParser {
testToParse("-1000", "%Q", -1L);
}
+ @Test
+ public void testEpochWithFraction() {
+ testToParse("1500000000.123456789", "%s.%N", 1500000000L, 123456789);
+ testToParse("1500000000456.111111111", "%Q.%N", 1500000000L, 567111111);
+ testToParse("1500000000.123", "%s.%L", 1500000000L, 123000000);
+ testToParse("1500000000456.111", "%Q.%L", 1500000000L, 567000000);
+
+ testToParse("1.5", "%s.%N", 1L, 500000000);
+ testToParse("-1.5", "%s.%N", -2L, 500000000);
+ testToParse("1.000000001", "%s.%N", 1L, 1);
+ testToParse("-1.000000001", "%s.%N", -2L, 999999999);
+ }
+
+ @Test
+ public void testRubyEpochWithFraction() {
+ testRubyToParse("1500000000.123456789", "%s.%N", 1500000000L, 123456789);
+ testRubyToParse("1500000000456.111111111", "%Q.%N", 1500000000L, 567111111);
+ testRubyToParse("1500000000.123", "%s.%L", 1500000000L, 123000000);
+ testRubyToParse("1500000000456.111", "%Q.%L", 1500000000L, 567000000);
+
+ testRubyToParse("1.5", "%s.%N", 1L, 500000000);
+ testRubyToParse("-1.5", "%s.%N", -2L, 500000000);
+ testRubyToParse("1.000000001", "%s.%N", 1L, 1);
+ testRubyToParse("-1.000000001", "%s.%N", -2L, 999999999);
+ }
+
private void testJavaToParse(final String string, final String format, final long second, final int nanoOfSecond) {
final TimestampParser parser = TimestampParser.of("java:" + format, "UTC");
final Timestamp timestamp = parser.parse(string); | ['embulk-core/src/test/java/org/embulk/spi/time/TestTimestampParser.java', 'embulk-core/src/main/java/org/embulk/spi/time/RubyTimeParsed.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 977,762 | 186,819 | 25,834 | 242 | 3,705 | 978 | 66 | 1 | 1,498 | 131 | 450 | 29 | 2 | 2 | 2018-09-11T06:25:36 | 1,692 | Java | {'Java': 1206535, 'Ruby': 102994, 'Batchfile': 4257, 'Shell': 3118, 'HTML': 538} | Apache License 2.0 |
553 | embulk/embulk/951/949 | embulk | embulk | https://github.com/embulk/embulk/issues/949 | https://github.com/embulk/embulk/pull/951 | https://github.com/embulk/embulk/pull/951 | 2 | fix | Plugin's Task causes JsonMappingException when the Task extends TimestampFormatter.Task with @Config("default_timezone") | When a plugin has its `Task` interface which extends core's `Task` interface with `default` methods for compatibility, it causes `JsonMappingException` such as:
```
com.fasterxml.jackson.databind.JsonMappingException: Multiple entries with same key: default_timezone=org.embulk.config.TaskSerDe$TaskDeserializer$FieldEntry@2548fc01 and default_timezone=org.embulk.config.TaskSerDe$TaskDeserializer$FieldEntry@4331be30
```
embulk-filter-calcite is one such case:
https://github.com/muga/embulk-filter-calcite/blob/v0.1.3/src/main/java/org/embulk/filter/calcite/CalciteFilterPlugin.java#L246-L248
The direct cause is that the extended `interface` has two getter methods with the same `@Config("default_timezone")`.
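To make the conflicting shape concrete, here is a minimal sketch; it is modeled on the `DuplicatedDateTimeZone` test interface added by the fix below rather than on the exact calcite code, so treat the names as illustrative:
```
// @Config / @ConfigDefault are the org.embulk.config annotations (imports omitted).
// TimestampParser.Task already maps a getter to "default_timezone", so re-declaring
// one here yields two getters for the same config name and trips the duplicate-key check.
interface PluginTask extends org.embulk.config.Task, org.embulk.spi.time.TimestampParser.Task {
    @Config("default_timezone")
    @ConfigDefault("\"America/Los_Angeles\"")
    org.joda.time.DateTimeZone getDefaultTimeZone();
}

PluginTask task = configSource.loadConfig(PluginTask.class);  // threw the JsonMappingException above before the fix
```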
Options to fix it:
1. If the parent's method has a `default` implementation and it does not have `@Config`, the overridden method behaves as if it does not have `@Config`. In calcite's case, `getDefaultTimeZone` would behave as if it does not have `@Config`.
2. Multiple getters with the same `@Config` configuration names are allowed. | 128ac124a941223c86c33bd43523f0eca5d9d066 | ddd49c21fe744c71e4131e255d169c772a01dacd | https://github.com/embulk/embulk/compare/128ac124a941223c86c33bd43523f0eca5d9d066...ddd49c21fe744c71e4131e255d169c772a01dacd | diff --git a/embulk-core/src/main/java/org/embulk/config/TaskInvocationHandler.java b/embulk-core/src/main/java/org/embulk/config/TaskInvocationHandler.java
index 53fc4d35..a652c44a 100644
--- a/embulk-core/src/main/java/org/embulk/config/TaskInvocationHandler.java
+++ b/embulk-core/src/main/java/org/embulk/config/TaskInvocationHandler.java
@@ -1,6 +1,7 @@
package org.embulk.config;
-import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableMultimap;
+import com.google.common.collect.Multimap;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
@@ -24,10 +25,12 @@ class TaskInvocationHandler implements InvocationHandler {
}
/**
- * fieldName = Method of the getter
+ * Returns a Multimap from fieldName Strings to their getter Methods.
+ *
+ * It expects to be called only from TaskSerDe. Multimap is used inside org.embulk.config.
*/
- public static Map<String, Method> fieldGetters(Class<?> iface) {
- ImmutableMap.Builder<String, Method> builder = ImmutableMap.builder();
+ static Multimap<String, Method> fieldGetters(Class<?> iface) {
+ ImmutableMultimap.Builder<String, Method> builder = ImmutableMultimap.builder();
for (Method method : iface.getMethods()) {
String methodName = method.getName();
String fieldName = getterFieldNameOrNull(methodName);
diff --git a/embulk-core/src/main/java/org/embulk/config/TaskSerDe.java b/embulk-core/src/main/java/org/embulk/config/TaskSerDe.java
index 28df6199..677a6486 100644
--- a/embulk-core/src/main/java/org/embulk/config/TaskSerDe.java
+++ b/embulk-core/src/main/java/org/embulk/config/TaskSerDe.java
@@ -10,6 +10,7 @@ import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -17,14 +18,16 @@ import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.deser.Deserializers;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.base.Optional;
+import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Multimap;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
-import java.util.HashMap;
+import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -66,7 +69,7 @@ class TaskSerDe {
private final ObjectMapper nestedObjectMapper;
private final ModelManager model;
private final Class<?> iface;
- private final Map<String, FieldEntry> mappings;
+ private final Multimap<String, FieldEntry> mappings;
private final List<InjectEntry> injects;
public TaskDeserializer(ObjectMapper nestedObjectMapper, ModelManager model, Class<T> iface) {
@@ -77,9 +80,9 @@ class TaskSerDe {
this.injects = injectEntries(iface);
}
- protected Map<String, FieldEntry> getterMappings(Class<?> iface) {
- ImmutableMap.Builder<String, FieldEntry> builder = ImmutableMap.builder();
- for (Map.Entry<String, Method> getter : TaskInvocationHandler.fieldGetters(iface).entrySet()) {
+ protected Multimap<String, FieldEntry> getterMappings(Class<?> iface) {
+ ImmutableMultimap.Builder<String, FieldEntry> builder = ImmutableMultimap.builder();
+ for (Map.Entry<String, Method> getter : TaskInvocationHandler.fieldGetters(iface).entries()) {
Method getterMethod = getter.getValue();
String fieldName = getter.getKey();
@@ -103,7 +106,7 @@ class TaskSerDe {
protected List<InjectEntry> injectEntries(Class<?> iface) {
ImmutableList.Builder<InjectEntry> builder = ImmutableList.builder();
- for (Map.Entry<String, Method> getter : TaskInvocationHandler.fieldGetters(iface).entrySet()) {
+ for (Map.Entry<String, Method> getter : TaskInvocationHandler.fieldGetters(iface).entries()) {
Method getterMethod = getter.getValue();
String fieldName = getter.getKey();
ConfigInject inject = getterMethod.getAnnotation(ConfigInject.class);
@@ -127,7 +130,7 @@ class TaskSerDe {
@SuppressWarnings("unchecked")
public T deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
Map<String, Object> objects = new ConcurrentHashMap<String, Object>();
- HashMap<String, FieldEntry> unusedMappings = new HashMap<>(mappings);
+ HashMultimap<String, FieldEntry> unusedMappings = HashMultimap.<String, FieldEntry>create(mappings);
String key;
JsonToken current = jp.getCurrentToken();
@@ -140,21 +143,31 @@ class TaskSerDe {
for (; key != null; key = jp.nextFieldName()) {
JsonToken t = jp.nextToken(); // to get to value
- FieldEntry field = mappings.get(key);
- if (field == null) {
+ final Collection<FieldEntry> fields = mappings.get(key);
+ if (fields.isEmpty()) {
jp.skipChildren();
} else {
- Object value = nestedObjectMapper.readValue(jp, new GenericTypeReference(field.getType()));
- if (value == null) {
- throw new JsonMappingException("Setting null to a task field is not allowed. Use Optional<T> (com.google.common.base.Optional) to represent null.");
+ final JsonNode children = nestedObjectMapper.readValue(jp, JsonNode.class);
+ for (final FieldEntry field : fields) {
+ final Object value = nestedObjectMapper.convertValue(children, new GenericTypeReference(field.getType()));
+ if (value == null) {
+ throw new JsonMappingException("Setting null to a task field is not allowed. Use Optional<T> (com.google.common.base.Optional) to represent null.");
+ }
+ objects.put(field.getName(), value);
+ if (!unusedMappings.remove(key, field)) {
+ throw new JsonMappingException(String.format(
+                                "FATAL: Expected to be a bug in Embulk. Mapping \"%s: (%s) %s\" might have already been processed, or not in %s.",
+ key,
+ field.getType().toString(),
+ field.getName(),
+ this.iface.toString()));
+ }
}
- objects.put(field.getName(), value);
- unusedMappings.remove(key);
}
}
// set default values
- for (Map.Entry<String, FieldEntry> unused : unusedMappings.entrySet()) {
+ for (Map.Entry<String, FieldEntry> unused : unusedMappings.entries()) {
FieldEntry field = unused.getValue();
if (field.getDefaultJsonString().isPresent()) {
Object value = nestedObjectMapper.readValue(field.getDefaultJsonString().get(), new GenericTypeReference(field.getType()));
diff --git a/embulk-core/src/test/java/org/embulk/config/TestConfigSource.java b/embulk-core/src/test/java/org/embulk/config/TestConfigSource.java
index 73c04563..62a406a7 100644
--- a/embulk-core/src/test/java/org/embulk/config/TestConfigSource.java
+++ b/embulk-core/src/test/java/org/embulk/config/TestConfigSource.java
@@ -4,6 +4,7 @@ import static org.junit.Assert.assertEquals;
import org.embulk.EmbulkTestRuntime;
import org.embulk.spi.Exec;
+import org.embulk.spi.time.TimestampParser;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -36,6 +37,28 @@ public class TestConfigSource {
public String getString();
}
+ private static interface DuplicationParent extends Task {
+ @Config("duplicated_number")
+ public int getInteger();
+ }
+
+ private static interface Duplicated extends DuplicationParent {
+ @Config("duplicated_number")
+ public String getString();
+
+ @Config("duplicated_number")
+ public double getDouble();
+ }
+
+ private static interface DuplicatedDateTimeZone extends Task, TimestampParser.Task {
+ @Config("default_timezone")
+        @ConfigDefault("\"America/Los_Angeles\"")
+ public org.joda.time.DateTimeZone getDefaultTimeZone();
+
+ @Config("dummy_value")
+ public String getDummyValue();
+ }
+
@Test
public void testSetGet() {
config.set("boolean", true);
@@ -67,6 +90,43 @@ public class TestConfigSource {
assertEquals("sf", task.getString());
}
+ @Test
+ public void testDuplicatedConfigName() {
+ config.set("duplicated_number", "1034");
+
+ Duplicated task = config.loadConfig(Duplicated.class);
+ assertEquals(1034, task.getInteger());
+ assertEquals("1034", task.getString());
+ assertEquals(1034.0, task.getDouble(), 0.000001);
+ }
+
+ @Test
+ public void testDuplicatedDateTimeZone() {
+ config.set("default_timezone", "Asia/Tokyo");
+ config.set("default_timestamp_format", "%Y");
+ config.set("dummy_value", "foobar");
+
+ DuplicatedDateTimeZone task = config.loadConfig(DuplicatedDateTimeZone.class);
+ assertEquals("Asia/Tokyo", task.getDefaultTimeZoneId());
+ assertEquals(org.joda.time.DateTimeZone.forID("Asia/Tokyo"), task.getDefaultTimeZone());
+ assertEquals("%Y", task.getDefaultTimestampFormat());
+ assertEquals("1970-01-01", task.getDefaultDate());
+ assertEquals("foobar", task.getDummyValue());
+ }
+
+ @Test
+ public void testDuplicatedDateTimeZoneWithDefault() {
+ config.set("default_timestamp_format", "%Y");
+ config.set("dummy_value", "foobar");
+
+ DuplicatedDateTimeZone task = config.loadConfig(DuplicatedDateTimeZone.class);
+ assertEquals("UTC", task.getDefaultTimeZoneId());
+ assertEquals(org.joda.time.DateTimeZone.forID("America/Los_Angeles"), task.getDefaultTimeZone());
+ assertEquals("%Y", task.getDefaultTimestampFormat());
+ assertEquals("1970-01-01", task.getDefaultDate());
+ assertEquals("foobar", task.getDummyValue());
+ }
+
private static interface ValidateFields extends Task {
@Config("valid")
public String getValid(); | ['embulk-core/src/test/java/org/embulk/config/TestConfigSource.java', 'embulk-core/src/main/java/org/embulk/config/TaskSerDe.java', 'embulk-core/src/main/java/org/embulk/config/TaskInvocationHandler.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 944,957 | 180,571 | 24,964 | 237 | 4,025 | 726 | 56 | 2 | 1,054 | 104 | 255 | 15 | 1 | 1 | 2018-02-05T07:45:26 | 1,692 | Java | {'Java': 1206535, 'Ruby': 102994, 'Batchfile': 4257, 'Shell': 3118, 'HTML': 538} | Apache License 2.0 |
554 | embulk/embulk/840/833 | embulk | embulk | https://github.com/embulk/embulk/issues/833 | https://github.com/embulk/embulk/pull/840 | https://github.com/embulk/embulk/pull/840 | 1 | fix | Use Ruby's timezone names instead of Joda-Time's | While I was walking through our Ruby-compatible date/time parser and trying to refactor it, I'm afraid I found that we may not be using Ruby's timezone names (`TimeZoneConverter`), but Joda-Time's (see the sketch after the links below).
* https://github.com/embulk/embulk/blob/v0.8.36/embulk-core/src/main/java/org/embulk/spi/time/TimestampParser.java#L188
* https://github.com/embulk/embulk/blob/v0.8.36/embulk-core/src/main/java/org/embulk/spi/time/TimestampFormat.java#L58-L88
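If that is the case, the corrected lookup would consult Ruby's zone table first. A sketch of that ordering (it mirrors the change in the diff below; the variable names are illustrative):
```
// Try Ruby's zone-name table (TimeZoneConverter) before the existing Joda-Time handling.
final int rubyOffsetInSeconds = TimeZoneConverter.dateZoneToDiff(zoneName);
if (rubyOffsetInSeconds != Integer.MIN_VALUE) {
    return DateTimeZone.forOffsetMillis(rubyOffsetInSeconds * 1000);  // recognized by Ruby's table
}
// ... otherwise fall through to the current Joda-Time parsing path ...
```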
@muga Can you check it? If it's true, I'd fix it throughout my refactoring. | 4c927b84f31aca653422dc0bf27d4d605cc94a5f | ae981c0a0953174b4a3c042a1b93da2339586a76 | https://github.com/embulk/embulk/compare/4c927b84f31aca653422dc0bf27d4d605cc94a5f...ae981c0a0953174b4a3c042a1b93da2339586a76 | diff --git a/embulk-core/src/main/java/org/embulk/spi/time/TimestampFormat.java b/embulk-core/src/main/java/org/embulk/spi/time/TimestampFormat.java
index 2a634b1f..857001f1 100644
--- a/embulk-core/src/main/java/org/embulk/spi/time/TimestampFormat.java
+++ b/embulk-core/src/main/java/org/embulk/spi/time/TimestampFormat.java
@@ -57,6 +57,12 @@ public class TimestampFormat
public static DateTimeZone parseDateTimeZone(String s)
{
+ final int rubyStyleTimeOffsetInSecond = TimeZoneConverter.dateZoneToDiff(s);
+
+ if (rubyStyleTimeOffsetInSecond != Integer.MIN_VALUE) {
+ return DateTimeZone.forOffsetMillis(rubyStyleTimeOffsetInSecond * 1000);
+ }
+
if(s.startsWith("+") || s.startsWith("-")) {
return DateTimeZone.forID(s);
| ['embulk-core/src/main/java/org/embulk/spi/time/TimestampFormat.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 867,238 | 168,384 | 25,772 | 229 | 251 | 54 | 6 | 1 | 523 | 46 | 145 | 6 | 2 | 0 | 2017-10-26T10:47:00 | 1,692 | Java | {'Java': 1206535, 'Ruby': 102994, 'Batchfile': 4257, 'Shell': 3118, 'HTML': 538} | Apache License 2.0 |
551 | embulk/embulk/1054/1031 | embulk | embulk | https://github.com/embulk/embulk/issues/1031 | https://github.com/embulk/embulk/pull/1054 | https://github.com/embulk/embulk/pull/1054 | 1 | fix | "embulk new" command not work in windows | I executed the following command on Windows 10 x64:
> E:\work> embulk new java-file-output onefile
```
2018-07-18 10:41:05.724 +0900: Embulk v0.9.7
Creating embulk-output-onefile/
java.lang.UnsupportedOperationException
at sun.nio.fs.WindowsFileSystemProvider.readAttributes(Unknown Source)
at java.nio.file.Files.readAttributes(Unknown Source)
at java.nio.file.Files.getPosixFilePermissions(Unknown Source)
at org.embulk.cli.EmbulkNew.setExecutable(EmbulkNew.java:390)
at org.embulk.cli.EmbulkNew.newPlugin(EmbulkNew.java:169)
at org.embulk.cli.EmbulkRun.runSubcommand(EmbulkRun.java:338)
at org.embulk.cli.EmbulkRun.run(EmbulkRun.java:91)
at org.embulk.cli.Main.main(Main.java:26)
Failed. Removing the directory created.
```
I executed the same command on CentOS 6 and it succeeded, but it does not work on the Windows PC.
The JDK version is 1.8.0_171.
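The failure comes from requesting POSIX file permissions on a file system (NTFS) that does not support them. A guard along these lines avoids it; this is a sketch of the approach the fix below takes, and the class and method names here are illustrative:
```
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.util.HashSet;
import java.util.Set;

class ExecutableBitHelper {
    // Skip the chmod-style step when the file system has no "posix" attribute view
    // (NTFS, for example); otherwise Files.getPosixFilePermissions throws
    // UnsupportedOperationException, as in the stack trace above.
    static void setExecutableIfSupported(final Path target) throws IOException {
        if (target.getFileSystem().supportedFileAttributeViews().contains("posix")) {
            final Set<PosixFilePermission> permissions =
                    new HashSet<>(Files.getPosixFilePermissions(target));
            permissions.add(PosixFilePermission.OWNER_EXECUTE);
            permissions.add(PosixFilePermission.GROUP_EXECUTE);
            permissions.add(PosixFilePermission.OTHERS_EXECUTE);
            Files.setPosixFilePermissions(target, permissions);
        }
    }
}
```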
| 4b8250d88792cd4e9024903e133b76758ef6a02e | ea83f92ecd0b595cd6f04bdad865bbec31e810ab | https://github.com/embulk/embulk/compare/4b8250d88792cd4e9024903e133b76758ef6a02e...ea83f92ecd0b595cd6f04bdad865bbec31e810ab | diff --git a/embulk-core/src/main/java/org/embulk/cli/EmbulkMigrate.java b/embulk-core/src/main/java/org/embulk/cli/EmbulkMigrate.java
index 8064276d..8a5c152b 100644
--- a/embulk-core/src/main/java/org/embulk/cli/EmbulkMigrate.java
+++ b/embulk-core/src/main/java/org/embulk/cli/EmbulkMigrate.java
@@ -4,6 +4,7 @@ import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
@@ -87,7 +88,7 @@ public class EmbulkMigrate {
if (migrator.match("gradle/wrapper/gradle-wrapper.properties", GRADLE_VERSION_IN_WRAPPER)) {
// gradle < 4.1
migrator.copy("org/embulk/plugin/template/java/gradlew", "gradlew");
- migrator.setExecutable("gradlew");
+ migrator.setExecutableIfAvailable("gradlew");
migrator.copy("org/embulk/plugin/template/java/gradle/wrapper/gradle-wrapper.properties",
"gradle/wrapper/gradle-wrapper.properties");
migrator.copy("org/embulk/plugin/template/java/gradle/wrapper/gradle-wrapper.jar",
@@ -381,14 +382,18 @@ public class EmbulkMigrate {
}
}
- private void setExecutable(String targetFileName) throws IOException {
+ private void setExecutableIfAvailable(final String targetFileName) throws IOException {
final Path targetPath = this.basePath.resolve(targetFileName);
- final Set<PosixFilePermission> permissions =
- new HashSet<PosixFilePermission>(Files.getPosixFilePermissions(targetPath));
- permissions.add(PosixFilePermission.OWNER_EXECUTE);
- permissions.add(PosixFilePermission.GROUP_EXECUTE);
- permissions.add(PosixFilePermission.OTHERS_EXECUTE);
- Files.setPosixFilePermissions(targetPath, permissions);
+ final FileSystem fileSystem = targetPath.getFileSystem();
+ if (fileSystem.supportedFileAttributeViews().contains("posix")) {
+ // NTFS does not support PosixFilePermissions, for example.
+ final Set<PosixFilePermission> permissions =
+ new HashSet<PosixFilePermission>(Files.getPosixFilePermissions(targetPath));
+ permissions.add(PosixFilePermission.OWNER_EXECUTE);
+ permissions.add(PosixFilePermission.GROUP_EXECUTE);
+ permissions.add(PosixFilePermission.OTHERS_EXECUTE);
+ Files.setPosixFilePermissions(targetPath, permissions);
+ }
}
private final Path basePath;
diff --git a/embulk-core/src/main/java/org/embulk/cli/EmbulkNew.java b/embulk-core/src/main/java/org/embulk/cli/EmbulkNew.java
index 4317cf1c..82b0cbbf 100644
--- a/embulk-core/src/main/java/org/embulk/cli/EmbulkNew.java
+++ b/embulk-core/src/main/java/org/embulk/cli/EmbulkNew.java
@@ -7,6 +7,7 @@ import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystem;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -166,7 +167,7 @@ public class EmbulkNew {
copy("org/embulk/plugin/template/java/gradle/wrapper/gradle-wrapper.properties", "gradle/wrapper/gradle-wrapper.properties");
copy("org/embulk/plugin/template/java/gradlew.bat", "gradlew.bat");
copy("org/embulk/plugin/template/java/gradlew", "gradlew");
- setExecutable("gradlew");
+ setExecutableIfAvailable("gradlew");
copy("org/embulk/plugin/template/java/config/checkstyle/checkstyle.xml", "config/checkstyle/checkstyle.xml");
copy("org/embulk/plugin/template/java/config/checkstyle/default.xml", "config/checkstyle/default.xml");
copyTemplated("org/embulk/plugin/template/java/build.gradle.vm",
@@ -384,14 +385,18 @@ public class EmbulkNew {
}
}
- private void setExecutable(String targetFileName) throws IOException {
+ private void setExecutableIfAvailable(final String targetFileName) throws IOException {
final Path targetPath = this.pluginBasePath.resolve(targetFileName);
- final Set<PosixFilePermission> permissions =
- new HashSet<PosixFilePermission>(Files.getPosixFilePermissions(targetPath));
- permissions.add(PosixFilePermission.OWNER_EXECUTE);
- permissions.add(PosixFilePermission.GROUP_EXECUTE);
- permissions.add(PosixFilePermission.OTHERS_EXECUTE);
- Files.setPosixFilePermissions(targetPath, permissions);
+ final FileSystem fileSystem = targetPath.getFileSystem();
+ if (fileSystem.supportedFileAttributeViews().contains("posix")) {
+ // NTFS does not support PosixFilePermissions, for example.
+ final Set<PosixFilePermission> permissions =
+ new HashSet<PosixFilePermission>(Files.getPosixFilePermissions(targetPath));
+ permissions.add(PosixFilePermission.OWNER_EXECUTE);
+ permissions.add(PosixFilePermission.GROUP_EXECUTE);
+ permissions.add(PosixFilePermission.OTHERS_EXECUTE);
+ Files.setPosixFilePermissions(targetPath, permissions);
+ }
}
private final Path basePath;
diff --git a/embulk-core/src/main/java/org/embulk/cli/EmbulkSelfUpdate.java b/embulk-core/src/main/java/org/embulk/cli/EmbulkSelfUpdate.java
index 537cb834..58c5c49d 100644
--- a/embulk-core/src/main/java/org/embulk/cli/EmbulkSelfUpdate.java
+++ b/embulk-core/src/main/java/org/embulk/cli/EmbulkSelfUpdate.java
@@ -7,6 +7,7 @@ import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URISyntaxException;
import java.net.URL;
+import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -88,7 +89,10 @@ public class EmbulkSelfUpdate {
InputStream input = connection.getInputStream();
// TODO(dmikurube): Confirm if it is okay to replace a temp file created by Files.createTempFile.
Files.copy(input, jarPathTemp, StandardCopyOption.REPLACE_EXISTING);
- Files.setPosixFilePermissions(jarPathTemp, Files.getPosixFilePermissions(jarPathJava));
+ final FileSystem fileSystem = jarPathTemp.getFileSystem();
+ if (fileSystem.supportedFileAttributeViews().contains("posix")) {
+ Files.setPosixFilePermissions(jarPathTemp, Files.getPosixFilePermissions(jarPathJava));
+ }
} finally {
connection.disconnect();
} | ['embulk-core/src/main/java/org/embulk/cli/EmbulkMigrate.java', 'embulk-core/src/main/java/org/embulk/cli/EmbulkNew.java', 'embulk-core/src/main/java/org/embulk/cli/EmbulkSelfUpdate.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 992,870 | 189,754 | 26,097 | 242 | 3,201 | 572 | 48 | 3 | 917 | 68 | 245 | 23 | 0 | 1 | 2018-10-29T05:12:21 | 1,692 | Java | {'Java': 1206535, 'Ruby': 102994, 'Batchfile': 4257, 'Shell': 3118, 'HTML': 538} | Apache License 2.0 |
555 | embulk/embulk/761/757 | embulk | embulk | https://github.com/embulk/embulk/issues/757 | https://github.com/embulk/embulk/pull/761 | https://github.com/embulk/embulk/pull/761 | 1 | fix | Embulk 0.8.28 and 0.8.29 doesn't work Liquid {% include %}. | ## Summary
Embulk 0.8.29 does not process the Liquid `{% include %}` tag.
The following configuration works fine on 0.8.18, but it doesn't work on 0.8.19.
## Expected behavior
* Embulk 0.8.18
```
embulk preview config.yml.liquid
2017-08-10 12:53:43.210 +0900: Embulk v0.8.18
2017-08-10 12:53:44.502 +0900 [INFO] (0001:preview): Listing local files at directory '/private/tmp/hoge/csv' filtering filename by prefix 'sample_'
2017-08-10 12:53:44.506 +0900 [INFO] (0001:preview): Loading files [/private/tmp/hoge/csv/sample_01.csv.gz]
+---------+--------------+-------------------------+-------------------------+----------------------------+
| id:long | account:long | time:timestamp | purchase:timestamp | comment:string |
+---------+--------------+-------------------------+-------------------------+----------------------------+
| 1 | 32,864 | 2015-01-27 19:23:49 UTC | 2015-01-27 00:00:00 UTC | embulk |
| 2 | 14,824 | 2015-01-27 19:01:23 UTC | 2015-01-27 00:00:00 UTC | embulk jruby |
| 3 | 27,559 | 2015-01-28 02:20:02 UTC | 2015-01-28 00:00:00 UTC | Embulk "csv" parser plugin |
| 4 | 11,270 | 2015-01-29 11:54:36 UTC | 2015-01-29 00:00:00 UTC | |
+---------+--------------+-------------------------+-------------------------+----------------------------+
```
### Actual behavior
* Embulk 0.8.29
```
embulk selfupdate
2017-08-10 12:54:10.851 +0900: Embulk v0.8.18
Checking the latest version...
Found new version 0.8.29.
Downloading https://dl.bintray.com/embulk/maven/embulk-0.8.29.jar ...
Embulk::VERSION in (J)Ruby is deprecated. Use org.embulk.EmbulkVersion::VERSION instead. If this message is from a plugin, please tell this to the author of the plugin!
Updated to 0.8.29.
```
```
embulk preview config.yml.liquid
2017-08-10 12:54:37.897 +0900: Embulk v0.8.29
org.embulk.config.ConfigException: com.fasterxml.jackson.databind.JsonMappingException: Field 'in' is required but not set
at [Source: N/A; line: -1, column: -1]
at org.embulk.config.ModelManager.readObjectWithConfigSerDe(ModelManager.java:75)
at org.embulk.config.DataSourceImpl.loadConfig(DataSourceImpl.java:220)
at org.embulk.exec.PreviewExecutor.doPreview(PreviewExecutor.java:104)
at org.embulk.exec.PreviewExecutor.access$000(PreviewExecutor.java:31)
at org.embulk.exec.PreviewExecutor$1.run(PreviewExecutor.java:83)
at org.embulk.exec.PreviewExecutor$1.run(PreviewExecutor.java:79)
at org.embulk.spi.Exec.doWith(Exec.java:25)
at org.embulk.exec.PreviewExecutor.preview(PreviewExecutor.java:79)
at org.embulk.EmbulkEmbed.preview(EmbulkEmbed.java:169)
at org.embulk.EmbulkRunner.previewInternal(EmbulkRunner.java:242)
at org.embulk.EmbulkRunner.preview(EmbulkRunner.java:118)
at org.embulk.cli.EmbulkRun.runSubcommand(EmbulkRun.java:469)
at org.embulk.cli.EmbulkRun.run(EmbulkRun.java:100)
at org.embulk.cli.EmbulkBundle.checkBundleWithEmbulkVersion(EmbulkBundle.java:42)
at org.embulk.cli.EmbulkBundle.checkBundle(EmbulkBundle.java:15)
at org.embulk.cli.Main.main(Main.java:26)
Caused by: com.fasterxml.jackson.databind.JsonMappingException: Field 'in' is required but not set
at [Source: N/A; line: -1, column: -1]
at org.embulk.config.TaskSerDe$TaskDeserializer.deserialize(TaskSerDe.java:181)
at com.fasterxml.jackson.databind.ObjectMapper._readValue(ObjectMapper.java:3708)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2005)
at org.embulk.config.ModelManager.readObjectWithConfigSerDe(ModelManager.java:72)
... 15 more
Error: com.fasterxml.jackson.databind.JsonMappingException: Field 'in' is required but not set
at [Source: N/A; line: -1, column: -1]
```
### configuration
config.yml.liquid
```
{% include 'commons/test' %}
out: {type: stdout}
```
commons/_test.yml.liquid
```yaml
#
in:
type: file
path_prefix: /private/tmp/hoge/csv/sample_
decoders:
- {type: gzip}
parser:
charset: UTF-8
newline: LF
type: csv
delimiter: ','
quote: '"'
escape: '"'
null_string: 'NULL'
trim_if_not_quoted: false
skip_header_lines: 1
allow_extra_columns: false
allow_optional_columns: false
columns:
- {name: id, type: long}
- {name: account, type: long}
- {name: time, type: timestamp, format: '%Y-%m-%d %H:%M:%S'}
- {name: purchase, type: timestamp, format: '%Y%m%d'}
- {name: comment, type: string}
```
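The difference seems to come down to where the `{% include %}` tag looks for `commons/_test.yml.liquid`. The fix in the diff below defaults the Liquid include path to the directory containing the config file when no explicit include path is given; a rough sketch, with illustrative variable names:
```
// If no include path was supplied, fall back to the config file's parent directory,
// so that {% include 'commons/test' %} is looked up as
// <directory of config.yml.liquid>/commons/_test.yml.liquid.
final String includePath = (explicitIncludePath != null)
        ? explicitIncludePath
        : configFilePath.toAbsolutePath().getParent().toString();
```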
| 0b2755628a9fc266c8f420691639f6633402ae22 | 945997a5eb4e0dfcee63853a6861ff99a90f0800 | https://github.com/embulk/embulk/compare/0b2755628a9fc266c8f420691639f6633402ae22...945997a5eb4e0dfcee63853a6861ff99a90f0800 | diff --git a/embulk-core/src/main/java/org/embulk/EmbulkRunner.java b/embulk-core/src/main/java/org/embulk/EmbulkRunner.java
index 1b418f0b..cc64ca54 100644
--- a/embulk-core/src/main/java/org/embulk/EmbulkRunner.java
+++ b/embulk-core/src/main/java/org/embulk/EmbulkRunner.java
@@ -417,7 +417,9 @@ public class EmbulkRunner
return this.embed.newConfigLoader().fromYamlString(
runLiquid(new String(Files.readAllBytes(configFilePath), StandardCharsets.UTF_8),
templateParams,
- templateIncludePath));
+ (templateIncludePath == null
+ ? configFilePath.toAbsolutePath().getParent().toString()
+ : templateIncludePath)));
}
else if (EXT_YAML.matcher(configFilePath.toString()).matches()) {
return this.embed.newConfigLoader().fromYamlString( | ['embulk-core/src/main/java/org/embulk/EmbulkRunner.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 808,142 | 157,734 | 24,604 | 230 | 244 | 30 | 4 | 1 | 4,546 | 374 | 1,331 | 109 | 1 | 5 | 2017-08-21T06:48:59 | 1,692 | Java | {'Java': 1206535, 'Ruby': 102994, 'Batchfile': 4257, 'Shell': 3118, 'HTML': 538} | Apache License 2.0 |
556 | embulk/embulk/752/750 | embulk | embulk | https://github.com/embulk/embulk/issues/750 | https://github.com/embulk/embulk/pull/752 | https://github.com/embulk/embulk/pull/752 | 2 | fixes | embulk v0.8.28 bundle command is not working. | Embulk v0.8.28 breaks the bundle command. Maybe related to https://github.com/embulk/embulk/issues/748
- 0.8.28
```
embulk bundle
2017-07-28 09:38:46.392 +0000: Embulk v0.8.28
Exception in thread "main" java.lang.NullPointerException
at org.embulk.cli.EmbulkRun.runSubcommand(EmbulkRun.java:401)
at org.embulk.cli.EmbulkRun.run(EmbulkRun.java:80)
at org.embulk.cli.EmbulkBundle.checkBundleWithEmbulkVersion(EmbulkBundle.java:42)
at org.embulk.cli.EmbulkBundle.checkBundle(EmbulkBundle.java:15)
at org.embulk.cli.Main.main(Main.java:26)
```
- 0.8.27
```
embulk bundle install
2017-07-28 09:43:55.455 +0000: Embulk v0.8.27
Using embulk-executor-mapreduce 0.3.0
```
| 835c14ad8b0f4526539957a46cf2dadb356201be | 87f2195443cc384d23e846fedf7fc21eecc063f7 | https://github.com/embulk/embulk/compare/835c14ad8b0f4526539957a46cf2dadb356201be...87f2195443cc384d23e846fedf7fc21eecc063f7 | diff --git a/embulk-cli/src/main/java/org/embulk/cli/EmbulkRun.java b/embulk-cli/src/main/java/org/embulk/cli/EmbulkRun.java
index 9bc2ae60..c5933a8a 100644
--- a/embulk-cli/src/main/java/org/embulk/cli/EmbulkRun.java
+++ b/embulk-cli/src/main/java/org/embulk/cli/EmbulkRun.java
@@ -398,14 +398,14 @@ public class EmbulkRun
}
return 0;
case BUNDLE:
- if (!commandLine.getArguments().isEmpty() && commandLine.getArguments().get(0).equals("new")) {
- if (commandLine.getArguments().size() != 2) {
+ if (!subcommandArguments.isEmpty() && subcommandArguments.get(0).equals("new")) {
+ if (subcommandArguments.size() != 2) {
printGeneralUsage(System.err);
System.err.println("");
System.err.println("Use `<command> --help` to see description of the commands.");
return 1;
}
- newBundle(commandLine.getArguments().get(1), null);
+ newBundle(subcommandArguments.get(1), null);
System.err.println("'embulk bundle new' is deprecated. This will be removed in future release. Please use 'embulk mkbundle' instead.");
}
else { | ['embulk-cli/src/main/java/org/embulk/cli/EmbulkRun.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 803,674 | 156,922 | 24,514 | 231 | 453 | 100 | 6 | 1 | 691 | 50 | 239 | 22 | 1 | 2 | 2017-07-31T03:59:45 | 1,692 | Java | {'Java': 1206535, 'Ruby': 102994, 'Batchfile': 4257, 'Shell': 3118, 'HTML': 538} | Apache License 2.0 |
550 | embulk/embulk/1349/1348 | embulk | embulk | https://github.com/embulk/embulk/issues/1348 | https://github.com/embulk/embulk/pull/1349 | https://github.com/embulk/embulk/pull/1349 | 2 | fix | embulk mkbundle failed with the reason "undefined local variable or method `__internal_argv__'" | Issue Type: Bug Report
-----------------------
The environment is the same as in #1347.
* Write the following environmental information.
* OS version: macOS 10.15.7
* Java version: 1.8.0_251
* Embulk version: 0.10.21
* Your Embulk configuration (YAML): no config
* Plugin versions: no plugin
## Write all what you did, e.g. your commands executed
Running `embulk mkbundle` after `embulk gem install bundler`, I got another error.
The gem command installed `bundler` 2.1.4; it seems that this version is too new.
`embulk mkbundle` worked fine with bundler 1.16.0.
## Write what you expected and observed actually
* Embulk: 0.10.21
* OS: 10.15.7
* Java: 1.8.0_251
```
embulk-dev mkbundle hoge
2020-11-30 09:39:25.026 +0900 [INFO] (main): embulk_home is set from command-line: /path/to/.embulk-dev
Initializing hoge...
Creating Gemfile
Creating .bundle/config
Creating embulk/input/example.rb
Creating embulk/output/example.rb
Creating embulk/filter/example.rb
2020-11-30 09:39:27.520 +0900 [INFO] (main): Environment variable "GEM_HOME" is not set. Setting "GEM_HOME" to "/path/to/.embulk-dev/lib/gems" from Embulk system property "gem_home" for the "bundle" command.
--- ERROR REPORT TEMPLATE -------------------------------------------------------
# Error Report
## Questions
Please fill out answers to these questions, it'll help us figure out
why things are going wrong.
- **What did you do?**
I ran the command `<script> `
- **What did you expect to happen?**
I expected Bundler to...
- **What happened instead?**
Instead, what happened was...
- **Have you tried any solutions posted on similar issues in our issue tracker, stack overflow, or google?**
I tried...
- **Have you read our issues document, https://github.com/bundler/bundler/blob/master/doc/contributing/ISSUES.md?**
...
## Backtrace
NameError: undefined local variable or method `__internal_argv__' for main:Object
<script>:1:in `block in (root)'
/path/to/.embulk-dev/lib/gems/gems/bundler-2.1.4/lib/bundler/friendly_errors.rb:123:in `with_friendly_errors'
<script>:1:in `block in (root)'
org/jruby/RubyDir.java:323:in `chdir'
<script>:1:in `<main>'
## Environment
Bundler 2.1.4
Platforms ruby, universal-java-1.8
Ruby 2.3.3p0 (2017-12-07 revision 56859) [java]
Full Path /Library/Java/JavaVirtualMachines/jdk1.8.0_251.jdk/Contents/Home/jre/bin/java -cp :/path/to/.embulk/jars/jruby-complete-9.1.15.0.jar org.jruby.Main
Config Dir uri:classloader:/META-INF/jruby.home/etc
RubyGems 2.6.14
Gem Home /path/to/.embulk-dev/lib/gems
Gem Path /path/to/.embulk-dev/lib/gems
User Home /Users/hsato
User Path /path/to/.gem/jruby/2.3.0
Bin Dir /path/to/.embulk-dev/lib/gems/bin
Tools
Git 2.28.0
RVM not installed
rbenv rbenv 1.1.2
chruby not installed
## Bundler Build Metadata
Built At 2020-01-05
Git SHA 32a4159325
Released Version true
## Bundler settings
gem.test
Set for the current user (/path/to/.bundle/config): "minitest"
gem.mit
Set for the current user (/path/to/.bundle/config): false
gem.coc
Set for the current user (/path/to/.bundle/config): false
build.thin
Set for the current user (/path/to/.bundle/config): "--with-cflags=-Wno-error=implicit-function-declaration"
## Gemfile
### Gemfile
source 'https://rubygems.org/'
# No versions are specified for 'embulk' to use the gem embedded in embulk.jar.
# Note that prerelease versions (e.g. "0.9.0.beta") do not match the statement.
# Specify the exact prerelease version (like '= 0.9.0.beta') for prereleases.
gem 'embulk'
#
# 1. Use following syntax to specify versions of plugins
# to install this bundle directory:
#
#gem 'embulk-output-mysql' # the latest version
#gem 'embulk-input-baz', '= 0.2.0' # specific version
#gem 'embulk-input-xyz', '~> 0.3.0' # latest major version
#gem 'embulk-output-postgresql', '>= 0.1.0' # newer than specific version
#
#gem 'embulk-output-awesome', git: 'https://github.com/you/embulk-output-awesome.git', branch: 'master'
#
#
# 2. When you modify this file, run following command to
# install plugins:
#
# $ cd this_directory
# $ embulk bundle
#
#
# 3. Then you can use plugins with -b, --bundle BUNDLE_PATH command:
#
# $ embulk guess -b path/to/this/directory ...
# $ embulk run -b path/to/this/directory ...
# $ embulk preview -b path/to/this/directory ...
#
### Gemfile.lock
<No /private/tmp/hoge/Gemfile.lock found>
--- TEMPLATE END ----------------------------------------------------------------
Unfortunately, an unexpected error occurred, and Bundler cannot continue.
First, try this link to see if there are any existing issue reports for this error:
https://github.com/bundler/bundler/search?q=undefined+local+variable+or+method+%60__internal_argv__%27+for+main+Object&type=Issues
If there aren't any reports for this error yet, please create copy and paste the report template above into a new issue. Don't forget to anonymize any private data! The new issue form is located at:
https://github.com/bundler/bundler/issues/new
```
```
embulk-dev gem install bundler
embulk-dev gem list
*** LOCAL GEMS ***
bundler (2.1.4)
did_you_mean (default: 1.0.1)
embulk (0.10.21 java)
jar-dependencies (default: 0.3.10)
jruby-openssl (0.9.21 java)
jruby-readline (1.2.0 java)
json (1.8.3 java)
minitest (default: 5.4.1)
net-telnet (default: 0.1.1)
power_assert (default: 0.2.3)
psych (2.2.4 java)
rake (default: 10.4.2)
rdoc (default: 4.2.0)
test-unit (default: 3.1.1)
```
| 42ecca3361a1688f86aa6456462a1e998ef62240 | 311652e5ea15a19cb147b5e6d3ec7c754dc286a5 | https://github.com/embulk/embulk/compare/42ecca3361a1688f86aa6456462a1e998ef62240...311652e5ea15a19cb147b5e6d3ec7c754dc286a5 | diff --git a/embulk-core/src/main/java/org/embulk/cli/EmbulkRun.java b/embulk-core/src/main/java/org/embulk/cli/EmbulkRun.java
index 41e93ef3..6487ec35 100644
--- a/embulk-core/src/main/java/org/embulk/cli/EmbulkRun.java
+++ b/embulk-core/src/main/java/org/embulk/cli/EmbulkRun.java
@@ -250,7 +250,10 @@ public class EmbulkRun {
localJRubyContainer.runScriptlet("Bundler.with_friendly_errors { Bundler::CLI.start(Array.new(__internal_argv_java__), debug: true) }");
} else {
localJRubyContainer.put("__internal_working_dir__", path.toString());
- localJRubyContainer.runScriptlet("Dir.chdir(__internal_working_dir__) { Bundler.with_friendly_errors { Bundler::CLI.start(__internal_argv__, debug: true) } }");
+ localJRubyContainer.runScriptlet(
+ "Dir.chdir(__internal_working_dir__) {"
+ + " Bundler.with_friendly_errors { Bundler::CLI.start(Array.new(__internal_argv_java__), debug: true) }"
+ + "}");
localJRubyContainer.remove("__internal_working_dir__");
}
localJRubyContainer.remove("__internal_argv_java__"); | ['embulk-core/src/main/java/org/embulk/cli/EmbulkRun.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,120,666 | 222,029 | 30,173 | 266 | 437 | 93 | 5 | 1 | 5,774 | 653 | 1,679 | 188 | 5 | 2 | 2020-11-30T07:46:32 | 1,692 | Java | {'Java': 1206535, 'Ruby': 102994, 'Batchfile': 4257, 'Shell': 3118, 'HTML': 538} | Apache License 2.0 |
295 | morphiaorg/morphia/869/860 | morphiaorg | morphia | https://github.com/MorphiaOrg/morphia/issues/860 | https://github.com/MorphiaOrg/morphia/pull/869 | https://github.com/MorphiaOrg/morphia/pull/869 | 1 | fixes | Empty maps are deserialized as null | When configuring the mapper to also store empties
```
mapper.getOptions().setStoreEmpties(true);
```
then empty Maps and Lists are stored to the DB as well.
They are also deserialized as empty Maps and Lists again.
This works for an empty
```
Map<String, String>
```
BUT it does not work for empty Maps like the following
```
Map<String, List<String>>
Map<String, MyObject>
```
where
```
class MyObject {
private String valueA;
private int valueB;
}
```
It seems that when the map has a complex type in its value part, the map is no longer deserialized as an empty map but simply as null, which subsequently breaks the code because the objects are no longer equal after loading.
Tested with Morphia 1.0.1
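A round trip that shows the symptom could look like the following sketch; `Holder` and the datastore setup are illustrative, and only the field shape comes from this report:
```
// Hypothetical entity with the problematic field shape.
@Entity
public class Holder {
    @Id
    private ObjectId id = new ObjectId();
    private Map<String, MyObject> properties = new HashMap<String, MyObject>();
}

morphia.getMapper().getOptions().setStoreEmpties(true);
datastore.save(new Holder());                               // the empty map is written to the DB
Holder loaded = datastore.createQuery(Holder.class).get();
// Expected: loaded.properties is an empty Map (as it is for Map<String, String>).
// Observed on 1.0.1: loaded.properties is null, so the objects are no longer equal.
```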
| dc0745c7069d7ee48338ea73a934a7c2191305bb | 57eb1eea54d1a41fe8dd1e23dd9dfa754b556dce | https://github.com/morphiaorg/morphia/compare/dc0745c7069d7ee48338ea73a934a7c2191305bb...57eb1eea54d1a41fe8dd1e23dd9dfa754b556dce | diff --git a/morphia/src/main/java/org/mongodb/morphia/mapping/EmbeddedMapper.java b/morphia/src/main/java/org/mongodb/morphia/mapping/EmbeddedMapper.java
index dcf43e71cc5..8369c31f434 100644
--- a/morphia/src/main/java/org/mongodb/morphia/mapping/EmbeddedMapper.java
+++ b/morphia/src/main/java/org/mongodb/morphia/mapping/EmbeddedMapper.java
@@ -188,7 +188,7 @@ class EmbeddedMapper implements CustomMapper {
}
});
- if (!map.isEmpty()) {
+ if (!map.isEmpty() || mapper.getOptions().isStoreEmpties()) {
mf.setFieldValue(entity, map);
}
}
diff --git a/morphia/src/test/java/org/mongodb/morphia/mapping/MapperOptionsTest.java b/morphia/src/test/java/org/mongodb/morphia/mapping/MapperOptionsTest.java
index 696b3002261..f4c2e0e9d4b 100644
--- a/morphia/src/test/java/org/mongodb/morphia/mapping/MapperOptionsTest.java
+++ b/morphia/src/test/java/org/mongodb/morphia/mapping/MapperOptionsTest.java
@@ -14,6 +14,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Collection;
/**
@@ -72,6 +73,42 @@ public class MapperOptionsTest extends TestBase {
shouldNotFindField(hm);
}
+ @Test
+ public void emptyCollectionValuedMapStoredWithOptions() throws Exception {
+ final HasCollectionValuedMap hm = new HasCollectionValuedMap();
+ hm.properties = new HashMap<String, Collection<String>>();
+
+ //Test default behavior
+ getMorphia().getMapper().getOptions().setStoreEmpties(false);
+ shouldNotFindField(hm);
+
+ //Test default storing empty map with storeEmpties option
+ getMorphia().getMapper().getOptions().setStoreEmpties(true);
+ shouldFindField(hm, new HashMap<String, Collection<String>>());
+
+ //Test opposite from above
+ getMorphia().getMapper().getOptions().setStoreEmpties(false);
+ shouldNotFindField(hm);
+ }
+
+ @Test
+ public void emptyComplexObjectValuedMapStoredWithOptions() throws Exception {
+ final HasComplexObjectValuedMap hm = new HasComplexObjectValuedMap();
+ hm.properties = new HashMap<String, ComplexObject>();
+
+ //Test default behavior
+ getMorphia().getMapper().getOptions().setStoreEmpties(false);
+ shouldNotFindField(hm);
+
+ //Test default storing empty map with storeEmpties option
+ getMorphia().getMapper().getOptions().setStoreEmpties(true);
+ shouldFindField(hm, new HashMap<String, ComplexObject>());
+
+ //Test opposite from above
+ getMorphia().getMapper().getOptions().setStoreEmpties(false);
+ shouldNotFindField(hm);
+ }
+
@Test
public void lowercaseDefaultCollection() {
DummyEntity entity = new DummyEntity();
@@ -137,6 +174,22 @@ public class MapperOptionsTest extends TestBase {
Assert.assertEquals(expected, getDs().createQuery(HasMap.class).get().properties);
}
+ private void shouldFindField(final HasCollectionValuedMap hm, final Map<String, Collection<String>> expected) {
+ final DBObject dbObj;
+ getDs().save(hm);
+ dbObj = getDs().getCollection(HasCollectionValuedMap.class).findOne();
+ Assert.assertTrue("Should find the field", dbObj.containsField("properties"));
+ Assert.assertEquals(expected, getDs().createQuery(HasCollectionValuedMap.class).get().properties);
+ }
+
+ private void shouldFindField(final HasComplexObjectValuedMap hm, final Map<String, ComplexObject> expected) {
+ final DBObject dbObj;
+ getDs().save(hm);
+ dbObj = getDs().getCollection(HasComplexObjectValuedMap.class).findOne();
+ Assert.assertTrue("Should find the field", dbObj.containsField("properties"));
+ Assert.assertEquals(expected, getDs().createQuery(HasComplexObjectValuedMap.class).get().properties);
+ }
+
private void shouldNotFindField(final HasMap hl) {
getDs().save(hl);
DBObject dbObj = getDs().getCollection(HasMap.class).findOne();
@@ -151,6 +204,20 @@ public class MapperOptionsTest extends TestBase {
Assert.assertNull(getDs().createQuery(HasList.class).get().names);
}
+ private void shouldNotFindField(final HasCollectionValuedMap hm) {
+ getDs().save(hm);
+ DBObject dbObj = getDs().getCollection(HasCollectionValuedMap.class).findOne();
+ Assert.assertFalse("field should not exist, value = " + dbObj.get("properties"), dbObj.containsField("properties"));
+ Assert.assertNull(getDs().createQuery(HasCollectionValuedMap.class).get().properties);
+ }
+
+ private void shouldNotFindField(final HasComplexObjectValuedMap hm) {
+ getDs().save(hm);
+ DBObject dbObj = getDs().getCollection(HasComplexObjectValuedMap.class).findOne();
+ Assert.assertFalse("field should not exist, value = " + dbObj.get("properties"), dbObj.containsField("properties"));
+ Assert.assertNull(getDs().createQuery(HasComplexObjectValuedMap.class).get().properties);
+ }
+
private static class HasList implements Serializable {
@Id
private ObjectId id = new ObjectId();
@@ -169,8 +236,30 @@ public class MapperOptionsTest extends TestBase {
}
}
+ private static class HasCollectionValuedMap implements Serializable {
+ @Id
+ private ObjectId id = new ObjectId();
+ private Map<String, Collection<String>> properties;
+
+ HasCollectionValuedMap() {
+ }
+ }
+
+ private static class HasComplexObjectValuedMap implements Serializable {
+ @Id
+ private ObjectId id = new ObjectId();
+ private Map<String, ComplexObject> properties;
+
+ HasComplexObjectValuedMap() {
+ }
+ }
+
@Entity
private static class DummyEntity {
}
+ private static class ComplexObject {
+ private String stringVal;
+ private int intVal;
+ }
} | ['morphia/src/test/java/org/mongodb/morphia/mapping/MapperOptionsTest.java', 'morphia/src/main/java/org/mongodb/morphia/mapping/EmbeddedMapper.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 807,829 | 171,175 | 24,762 | 265 | 101 | 25 | 2 | 1 | 710 | 116 | 174 | 34 | 0 | 4 | 2015-11-11T03:56:40 | 1,611 | Java | {'Java': 2408208, 'Kotlin': 39110, 'Shell': 718} | Apache License 2.0 |
296 | morphiaorg/morphia/845/844 | morphiaorg | morphia | https://github.com/MorphiaOrg/morphia/issues/844 | https://github.com/MorphiaOrg/morphia/pull/845 | https://github.com/MorphiaOrg/morphia/pull/845 | 1 | fixes | Incorrect validation exception messages in QueryValidator | In QueryValidator's validateQuery method, when a mapped field is not found for a given part, the exception message refers to the original class being mapped rather than the class being used to look up the field:
```
if (mf == null) {
throw new ValidationException(format("The field '%s' could not be found in '%s' while validating - %s; if "
+ "you wish to continue please disable validation.", part,
clazz.getName(), prop
));
}
```
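As an illustration of the effect (based on the `id.notAField` case exercised by the tests added in the fix), validating a dotted path that walks into a field reports the wrong class:
```
// "id.notAField" walks into the ObjectId-typed "id" field of SimpleEntity.
// With the code above, the message names the root class:
//   The field 'notAField' could not be found in '...SimpleEntity' ...
// whereas the class actually being searched is org.bson.types.ObjectId,
// which is what the corrected message reports (per the new test below).
getDs().find(SimpleEntity.class, "id.notAField", 1).get();
```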
Further along, when checking for queries into `@Reference` or `@Serialized` fields, the exception message is:
```
format("Can not use dot-notation past '%s' could not be found in '%s' while"
+ " validating - %s", part, clazz.getName(), prop))
```
which looks like a copy-paste typo.
I have a branch off of master on my fork which fixes these issues; should I create a pull request?
| 68c11a3a1018c464ee8598d262b8c06dc48ddd6c | 61a00e26a1df513541016089138237fc96871f43 | https://github.com/morphiaorg/morphia/compare/68c11a3a1018c464ee8598d262b8c06dc48ddd6c...61a00e26a1df513541016089138237fc96871f43 | diff --git a/morphia/src/main/java/org/mongodb/morphia/query/QueryValidator.java b/morphia/src/main/java/org/mongodb/morphia/query/QueryValidator.java
index df732969bef..ba17e02d818 100644
--- a/morphia/src/main/java/org/mongodb/morphia/query/QueryValidator.java
+++ b/morphia/src/main/java/org/mongodb/morphia/query/QueryValidator.java
@@ -67,7 +67,7 @@ final class QueryValidator {
if (mf == null) {
throw new ValidationException(format("The field '%s' could not be found in '%s' while validating - %s; if "
+ "you wish to continue please disable validation.", part,
- clazz.getName(), prop
+ mc.getClazz().getName(), prop
));
}
hasTranslations = true;
@@ -87,8 +87,8 @@ final class QueryValidator {
if (!fieldIsArrayOperator) {
//catch people trying to search/update into @Reference/@Serialized fields
if (!canQueryPast(mf)) {
- throw new ValidationException(format("Can not use dot-notation past '%s' could not be found in '%s' while"
- + " validating - %s", part, clazz.getName(), prop));
+ throw new ValidationException(format("Cannot use dot-notation past '%s' in '%s'; found while"
+ + " validating - %s", part, mc.getClazz().getName(), prop));
}
//get the next MappedClass for the next field validation
diff --git a/morphia/src/test/java/org/mongodb/morphia/TestQuery.java b/morphia/src/test/java/org/mongodb/morphia/TestQuery.java
index eaf15da3490..beca4969eba 100644
--- a/morphia/src/test/java/org/mongodb/morphia/TestQuery.java
+++ b/morphia/src/test/java/org/mongodb/morphia/TestQuery.java
@@ -730,7 +730,7 @@ public class TestQuery extends TestBase {
getDs().find(ContainsPic.class, "pic.name", "foo").get();
assertNull("um, query validation should have thrown");
} catch (ValidationException e) {
- assertTrue(e.getMessage().contains("Can not use dot-"));
+ assertTrue(e.getMessage().contains("Cannot use dot-"));
}
}
diff --git a/morphia/src/test/java/org/mongodb/morphia/query/QueryValidatorTest.java b/morphia/src/test/java/org/mongodb/morphia/query/QueryValidatorTest.java
index e7e49cbe45c..28d2ede659a 100644
--- a/morphia/src/test/java/org/mongodb/morphia/query/QueryValidatorTest.java
+++ b/morphia/src/test/java/org/mongodb/morphia/query/QueryValidatorTest.java
@@ -2,8 +2,12 @@ package org.mongodb.morphia.query;
import com.mongodb.BasicDBObject;
import org.bson.types.ObjectId;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.ExpectedException;
import org.mongodb.morphia.Key;
+import org.mongodb.morphia.annotations.Reference;
+import org.mongodb.morphia.annotations.Serialized;
import org.mongodb.morphia.entities.EntityWithListsAndArrays;
import org.mongodb.morphia.entities.SimpleEntity;
import org.mongodb.morphia.mapping.MappedClass;
@@ -11,6 +15,7 @@ import org.mongodb.morphia.mapping.MappedField;
import org.mongodb.morphia.mapping.Mapper;
import org.mongodb.morphia.query.validation.ValidationFailure;
+import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -31,6 +36,9 @@ import static org.mongodb.morphia.query.FilterOperator.SIZE;
import static org.mongodb.morphia.query.QueryValidator.validateQuery;
public class QueryValidatorTest {
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
@Test
public void shouldAllowAllOperatorForIterableMapAndArrayValues() {
// expect
@@ -459,10 +467,46 @@ public class QueryValidatorTest {
new ArrayList<ValidationFailure>()), is(false));
}
+ @Test
+ public void shouldReferToMappedClassInExceptionWhenFieldNotFound() {
+ thrown.expect(ValidationException.class);
+ thrown.expectMessage("The field 'notAField' could not be found in 'org.bson.types.ObjectId'");
+ validateQuery(SimpleEntity.class, new Mapper(), new StringBuilder("id.notAField"), FilterOperator.EQUAL, 1, true, true);
+ }
+
+ @Test
+ public void shouldReferToMappedClassInExceptionWhenQueryingPastReferenceField() {
+ thrown.expect(ValidationException.class);
+ thrown.expectMessage("Cannot use dot-notation past 'reference' in 'org.mongodb.morphia.query.QueryValidatorTest$WithReference'");
+ validateQuery(WithReference.class, new Mapper(), new StringBuilder("reference.name"), FilterOperator.EQUAL, "", true, true);
+ }
+
+ @Test
+ public void shouldReferToMappedClassInExceptionWhenQueryingPastSerializedField() {
+ thrown.expect(ValidationException.class);
+ thrown.expectMessage("Cannot use dot-notation past 'serialized' in "
+ + "'org.mongodb.morphia.query.QueryValidatorTest$WithSerializedField'");
+ validateQuery(WithSerializedField.class, new Mapper(), new StringBuilder("serialized.name"), FilterOperator.EQUAL, "", true, true);
+ }
+
private static class GeoEntity {
private final int[] array = {1};
}
private static class NullClass {
}
+
+ private static class WithReference {
+ @Reference
+ private SimpleEntity reference;
+ }
+
+ private static class SerializableClass implements Serializable {
+ private String name;
+ }
+
+ private static class WithSerializedField {
+ @Serialized
+ private SerializableClass serialized;
+ }
} | ['morphia/src/main/java/org/mongodb/morphia/query/QueryValidator.java', 'morphia/src/test/java/org/mongodb/morphia/TestQuery.java', 'morphia/src/test/java/org/mongodb/morphia/query/QueryValidatorTest.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 807,826 | 171,173 | 24,762 | 265 | 664 | 98 | 6 | 1 | 1,287 | 137 | 209 | 22 | 0 | 2 | 2015-09-09T14:26:19 | 1,611 | Java | {'Java': 2408208, 'Kotlin': 39110, 'Shell': 718} | Apache License 2.0 |
297 | morphiaorg/morphia/842/839 | morphiaorg | morphia | https://github.com/MorphiaOrg/morphia/issues/839 | https://github.com/MorphiaOrg/morphia/pull/842 | https://github.com/MorphiaOrg/morphia/pull/842 | 1 | fixes | GeoNearBuilder.setNear() creates wrong near query (mixing up latitude & longutide) | When creating a geospatial aggregation (http://docs.mongodb.org/manual/reference/operator/aggregation/geoNear/), the `near` field takes the coordinates as [longitude, latitude].
In morphia the GeoNearBuilder method setNear is defined with:
```
public GeoNearBuilder setNear(final double latitude, final double longitude) {
    this.near = new double[]{latitude, longitude};
    return this;
}
```
A copy of this array of double values is then later put into a DBObject (see AggregationPipeline class line 166). Thus, in the corresponding aggregation query, the query would be:
```
$geoNear: {
    near: [latitude, longitude],
    ...
}
```
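For example, with the London coordinates used by the regression test added in this fix (latitude 51.5286416, longitude -0.1015987), the builder call and the resulting document differ only in the order of the two values:
```
// $geoNear's "near" must be [longitude, latitude] per the MongoDB docs linked above.
GeoNear geoNear = GeoNear.builder("distance")
        .setNear(51.5286416, -0.1015987)   // (latitude, longitude), as the builder declares
        .setSpherical(true)
        .build();
// Before the fix: near: [51.5286416, -0.1015987]   (latitude first, wrong)
// After the fix:  near: [-0.1015987, 51.5286416]   (longitude first, as MongoDB expects)
```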
Morphia version 1.0.1, Mongo Java driver version 3.0.3
JDK version 1.8.0_51
MongoDB version 3.0.5
| cb89d19e8b5f74ce084bba99f8b5b6edb032ae0d | e5727de983e0b5a75aeb3d11074aec7cf59ec4ea | https://github.com/morphiaorg/morphia/compare/cb89d19e8b5f74ce084bba99f8b5b6edb032ae0d...e5727de983e0b5a75aeb3d11074aec7cf59ec4ea | diff --git a/morphia/src/main/java/org/mongodb/morphia/aggregation/GeoNear.java b/morphia/src/main/java/org/mongodb/morphia/aggregation/GeoNear.java
index 79d408f6140..ba1c54989ec 100644
--- a/morphia/src/main/java/org/mongodb/morphia/aggregation/GeoNear.java
+++ b/morphia/src/main/java/org/mongodb/morphia/aggregation/GeoNear.java
@@ -268,7 +268,7 @@ public final class GeoNear {
* @return this
*/
public GeoNearBuilder setNear(final double latitude, final double longitude) {
- this.near = new double[]{latitude, longitude};
+ this.near = new double[]{longitude, latitude};
return this;
}
diff --git a/morphia/src/test/java/org/mongodb/morphia/aggregation/AggregationTest.java b/morphia/src/test/java/org/mongodb/morphia/aggregation/AggregationTest.java
index 973bd0629ed..13b121b3a56 100644
--- a/morphia/src/test/java/org/mongodb/morphia/aggregation/AggregationTest.java
+++ b/morphia/src/test/java/org/mongodb/morphia/aggregation/AggregationTest.java
@@ -24,6 +24,8 @@ import org.junit.Test;
import org.mongodb.morphia.TestBase;
import org.mongodb.morphia.annotations.Entity;
import org.mongodb.morphia.annotations.Id;
+import org.mongodb.morphia.geo.City;
+import org.mongodb.morphia.geo.PlaceWithLegacyCoords;
import java.text.ParseException;
import java.text.SimpleDateFormat;
@@ -37,6 +39,7 @@ import static org.mongodb.morphia.aggregation.Group.push;
import static org.mongodb.morphia.aggregation.Group.sum;
import static org.mongodb.morphia.aggregation.Projection.divide;
import static org.mongodb.morphia.aggregation.Projection.projection;
+import static org.mongodb.morphia.geo.GeoJson.point;
public class AggregationTest extends TestBase {
@Test
@@ -62,13 +65,64 @@ public class AggregationTest extends TestBase {
}
@Test
- public void testGeoNear() {
- // Given
-
-
- // When
+ public void testGeoNearWithSphericalGeometry() {
+ // given
+ double latitude = 51.5286416;
+ double longitude = -0.1015987;
+ City london = new City("London", point(latitude, longitude));
+ getDs().save(london);
+ City manchester = new City("Manchester", point(53.4722454, -2.2235922));
+ getDs().save(manchester);
+ City sevilla = new City("Sevilla", point(37.3753708, -5.9550582));
+ getDs().save(sevilla);
+
+ getDs().ensureIndexes();
+
+ // when
+ Iterator<City> citiesOrderedByDistanceFromLondon = getDs().createAggregation(City.class)
+ .geoNear(GeoNear.builder("distance")
+ .setNear(latitude, longitude)
+ .setSpherical(true)
+ .build())
+ .aggregate(City.class);
+
+ // then
+ Assert.assertTrue(citiesOrderedByDistanceFromLondon.hasNext());
+ Assert.assertEquals(london, citiesOrderedByDistanceFromLondon.next());
+ Assert.assertEquals(manchester, citiesOrderedByDistanceFromLondon.next());
+ Assert.assertEquals(sevilla, citiesOrderedByDistanceFromLondon.next());
+ Assert.assertFalse(citiesOrderedByDistanceFromLondon.hasNext());
+ }
- // Then
+ @Test
+ public void testGeoNearWithLegacyCoords() {
+ // given
+ double latitude = 51.5286416;
+ double longitude = -0.1015987;
+ PlaceWithLegacyCoords london = new PlaceWithLegacyCoords(new double[]{longitude, latitude}, "London");
+ getDs().save(london);
+ PlaceWithLegacyCoords manchester = new PlaceWithLegacyCoords(new double[]{-2.2235922, 53.4722454}, "Manchester");
+ getDs().save(manchester);
+ PlaceWithLegacyCoords sevilla = new PlaceWithLegacyCoords(new double[]{-5.9550582, 37.3753708}, "Sevilla");
+ getDs().save(sevilla);
+
+ getDs().ensureIndexes();
+
+ // when
+ Iterator<PlaceWithLegacyCoords> citiesOrderedByDistanceFromLondon = getDs()
+ .createAggregation(PlaceWithLegacyCoords.class)
+ .geoNear(GeoNear.builder("distance")
+ .setNear(latitude, longitude)
+ .setSpherical(false)
+ .build())
+ .aggregate(PlaceWithLegacyCoords.class);
+
+ // then
+ Assert.assertTrue(citiesOrderedByDistanceFromLondon.hasNext());
+ Assert.assertEquals(london, citiesOrderedByDistanceFromLondon.next());
+ Assert.assertEquals(manchester, citiesOrderedByDistanceFromLondon.next());
+ Assert.assertEquals(sevilla, citiesOrderedByDistanceFromLondon.next());
+ Assert.assertFalse(citiesOrderedByDistanceFromLondon.hasNext());
}
@Test
diff --git a/morphia/src/test/java/org/mongodb/morphia/geo/PlaceWithLegacyCoords.java b/morphia/src/test/java/org/mongodb/morphia/geo/PlaceWithLegacyCoords.java
index f31c5cfdb68..b8a61768b01 100644
--- a/morphia/src/test/java/org/mongodb/morphia/geo/PlaceWithLegacyCoords.java
+++ b/morphia/src/test/java/org/mongodb/morphia/geo/PlaceWithLegacyCoords.java
@@ -8,14 +8,14 @@ import org.mongodb.morphia.utils.IndexDirection;
import java.util.Arrays;
@SuppressWarnings("unused")
-class PlaceWithLegacyCoords {
+public class PlaceWithLegacyCoords {
@Id
private ObjectId id;
@Indexed(IndexDirection.GEO2D)
private double[] location = new double[2];
private String name;
- PlaceWithLegacyCoords(final double[] location, final String name) {
+ public PlaceWithLegacyCoords(final double[] location, final String name) {
this.location = location;
this.name = name;
} | ['morphia/src/main/java/org/mongodb/morphia/aggregation/GeoNear.java', 'morphia/src/test/java/org/mongodb/morphia/geo/PlaceWithLegacyCoords.java', 'morphia/src/test/java/org/mongodb/morphia/aggregation/AggregationTest.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 807,826 | 171,173 | 24,762 | 265 | 119 | 24 | 2 | 1 | 748 | 93 | 181 | 20 | 1 | 0 | 2015-09-03T13:50:11 | 1,611 | Java | {'Java': 2408208, 'Kotlin': 39110, 'Shell': 718} | Apache License 2.0 |
298 | morphiaorg/morphia/823/784 | morphiaorg | morphia | https://github.com/MorphiaOrg/morphia/issues/784 | https://github.com/MorphiaOrg/morphia/pull/823 | https://github.com/MorphiaOrg/morphia/pull/823 | 1 | fixes | NullPointerException mapping class implementing an interface | Using Morphia 1.0.0-rc0:
Invoking Morphia.map() on a class which implements an interface produces a NullPointerException because:
in ReflectionUtils.getParameterizedClass:
```
public static Class getParameterizedClass(final Class c, final int index) {
final TypeVariable[] typeVars = c.getTypeParameters();
if (typeVars.length > 0) {
final TypeVariable typeVariable = typeVars[index];
final Type[] bounds = typeVariable.getBounds();
final Type type = bounds[0];
if (type instanceof Class) {
return (Class) type; // broke for EnumSet, cause bounds contain
// type instead of class
} else {
return null;
}
} else {
final Type superclass = c.getGenericSuperclass();
if (superclass instanceof ParameterizedType) {
final Type[] actualTypeArguments = ((ParameterizedType) superclass).getActualTypeArguments();
return actualTypeArguments.length > index ? (Class<?>) actualTypeArguments[index] : null;
} else if (!Object.class.equals(superclass)) {
return getParameterizedClass((Class) superclass);
} else {
return null;
}
}
}
```
This line:
```
final Type superclass = c.getGenericSuperclass();
```
when executed on an interface, results in `superclass` being null, which in turn causes the subsequent invocation of
```
getParameterizedClass((Class) superclass)
```
to fail with NPE.
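For illustration, a minimal standalone sketch of the reflection behaviour described above (not Morphia code; the nested interface mirrors the `MapWithoutGenericTypes` test case added by the fix below): `getGenericSuperclass()` is defined to return null for interfaces, while `getGenericInterfaces()` still exposes the parameterized supertype.
```java
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Map;

public class GenericSupertypeDemo {

    // An interface that fixes the type parameters of another interface.
    interface MapWithoutGenericTypes extends Map<Integer, String> {
    }

    public static void main(String[] args) {
        Class<?> c = MapWithoutGenericTypes.class;

        // For interfaces this returns null, which is what triggered the NPE
        // on the recursive call inside getParameterizedClass.
        System.out.println(c.getGenericSuperclass()); // null

        // The generic supertype information is available through the interfaces instead.
        Type mapType = c.getGenericInterfaces()[0];   // java.util.Map<java.lang.Integer, java.lang.String>
        Type firstArgument = ((ParameterizedType) mapType).getActualTypeArguments()[0];
        System.out.println(firstArgument);            // class java.lang.Integer
    }
}
```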
| 41e410cd664f432d3dea34e1765b3b45b7cb7c10 | 2f39232e8a7727612dc08ac50fc4658e11622f2c | https://github.com/morphiaorg/morphia/compare/41e410cd664f432d3dea34e1765b3b45b7cb7c10...2f39232e8a7727612dc08ac50fc4658e11622f2c | diff --git a/morphia/src/main/java/org/mongodb/morphia/utils/ReflectionUtils.java b/morphia/src/main/java/org/mongodb/morphia/utils/ReflectionUtils.java
index 4e92a715cae..e46147eb9c8 100644
--- a/morphia/src/main/java/org/mongodb/morphia/utils/ReflectionUtils.java
+++ b/morphia/src/main/java/org/mongodb/morphia/utils/ReflectionUtils.java
@@ -299,7 +299,13 @@ public final class ReflectionUtils {
return null;
}
} else {
- final Type superclass = c.getGenericSuperclass();
+ Type superclass = c.getGenericSuperclass();
+ if (superclass == null && c.isInterface()) {
+ Type[] interfaces = c.getGenericInterfaces();
+ if (interfaces.length > 0) {
+ superclass = interfaces[index];
+ }
+ }
if (superclass instanceof ParameterizedType) {
final Type[] actualTypeArguments = ((ParameterizedType) superclass).getActualTypeArguments();
return actualTypeArguments.length > index ? (Class<?>) actualTypeArguments[index] : null;
diff --git a/morphia/src/test/java/org/mongodb/morphia/utils/ReflectionUtilsTest.java b/morphia/src/test/java/org/mongodb/morphia/utils/ReflectionUtilsTest.java
index e40035d842f..960abef4df2 100644
--- a/morphia/src/test/java/org/mongodb/morphia/utils/ReflectionUtilsTest.java
+++ b/morphia/src/test/java/org/mongodb/morphia/utils/ReflectionUtilsTest.java
@@ -1,6 +1,5 @@
package org.mongodb.morphia.utils;
-import org.junit.Ignore;
import org.junit.Test;
import org.mongodb.morphia.TestBase;
import org.mongodb.morphia.annotations.Entity;
@@ -16,6 +15,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import static org.hamcrest.CoreMatchers.isA;
@@ -31,9 +31,8 @@ import static org.mongodb.morphia.testutil.ExactClassMatcher.exactClass;
public class ReflectionUtilsTest extends TestBase {
@Test
- @Ignore("Not implemented yet")
- public void shouldAcceptInterfacesWithoutGenericParameters() {
- Class parameterizedClass = ReflectionUtils.getParameterizedClass(InterfaceWithoutGenericTypes.class);
+ public void shouldAcceptMapWithoutItsOwnGenericParameters() {
+ Class parameterizedClass = ReflectionUtils.getParameterizedClass(MapWithoutGenericTypes.class);
assertThat(parameterizedClass, is(exactClass(Integer.class)));
}
@@ -107,7 +106,7 @@ public class ReflectionUtilsTest extends TestBase {
assertThat(ReflectionUtils.getClassEntityAnnotation(Fooble.class).value(), is(Mapper.IGNORED_FIELDNAME));
}
- private interface InterfaceWithoutGenericTypes extends List<Integer> {
+ private interface MapWithoutGenericTypes extends Map<Integer, String> {
}
@Entity("generic_arrays") | ['morphia/src/test/java/org/mongodb/morphia/utils/ReflectionUtilsTest.java', 'morphia/src/main/java/org/mongodb/morphia/utils/ReflectionUtils.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 805,040 | 170,603 | 24,712 | 265 | 373 | 64 | 8 | 1 | 1,481 | 160 | 314 | 48 | 0 | 3 | 2015-07-09T15:09:17 | 1,611 | Java | {'Java': 2408208, 'Kotlin': 39110, 'Shell': 718} | Apache License 2.0 |
148 | magefree/mage/8592/6393 | magefree | mage | https://github.com/magefree/mage/issues/6393 | https://github.com/magefree/mage/pull/8592 | https://github.com/magefree/mage/pull/8592 | 1 | closes | Elvish House Party uses system-local time zone to determine current hour, which may not be the same between clients | The solution *should* be simple enough - specify a time zone when calling `.now()`. UTC is uncontroversial.[citation needed]
```java
class CurrentHourCount implements DynamicValue {
@Override
public int calculate(Game game, Ability sourceAbility, Effect effect) {
int hour = LocalTime.now(ZoneId.of("UTC")).getHour();
// convert 24-hour value to 12-hour
if (hour > 12) {
hour -= 12;
}
if (hour == 0) {
hour = 12;
}
return hour;
}
@Override
public DynamicValue copy() {
return new CurrentHourCount();
}
@Override
public String getMessage() {
return "current hour, using the twelve-hour system";
}
}
``` | 26a2e0a5edf47bbb1a03a650f29f9d712adf7722 | b184f15125392e319827e3653828ff7ba7c52f18 | https://github.com/magefree/mage/compare/26a2e0a5edf47bbb1a03a650f29f9d712adf7722...b184f15125392e319827e3653828ff7ba7c52f18 | diff --git a/Mage.Sets/src/mage/cards/e/ElvishHouseParty.java b/Mage.Sets/src/mage/cards/e/ElvishHouseParty.java
index 95d6f79e7d..fc1dd34f33 100644
--- a/Mage.Sets/src/mage/cards/e/ElvishHouseParty.java
+++ b/Mage.Sets/src/mage/cards/e/ElvishHouseParty.java
@@ -2,6 +2,7 @@
package mage.cards.e;
import java.time.LocalTime;
+import java.time.ZoneId;
import java.util.UUID;
import mage.MageInt;
import mage.abilities.Ability;
@@ -50,7 +51,7 @@ class CurrentHourCount implements DynamicValue {
@Override
public int calculate(Game game, Ability sourceAbility, Effect effect) {
- int hour = LocalTime.now().getHour();
+ int hour = LocalTime.now(ZoneId.of("UTC")).getHour();
// convert 24-hour value to 12-hour
if (hour > 12) {
hour -= 12; | ['Mage.Sets/src/mage/cards/e/ElvishHouseParty.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 68,880,372 | 15,534,947 | 1,907,004 | 26,726 | 135 | 35 | 3 | 1 | 765 | 89 | 173 | 29 | 0 | 1 | 2022-01-25T01:46:37 | 1,605 | Java | {'Java': 83753240, 'Perl': 96893, 'Batchfile': 17988, 'Python': 5771, 'Shell': 858} | MIT License |
147 | magefree/mage/8618/7723 | magefree | mage | https://github.com/magefree/mage/issues/7723 | https://github.com/magefree/mage/pull/8618 | https://github.com/magefree/mage/pull/8618 | 1 | fixes | Deck import must search cards by left names too | from #7459
Full flip card names aren't recognized by the import code:
* `Orochi Eggwatcher // Shidako, Broodmistress`
This is because xmage stores a flip card's name as the left card only:
* `Orochi Eggwatcher`
So the search code should split the search name by ` // ` and search by both the full name and the left name. Possible methods to fix (search both names; see the sketch below):
* `findPreferedCoreExpansionCard`
* `findCardWPreferredSet` | 4d0f53da67990d2371d1d15d4c21ec1d92e89b96 | 6647c36f07dab004872c526da1c91c5ded99a2b2 | https://github.com/magefree/mage/compare/4d0f53da67990d2371d1d15d4c21ec1d92e89b96...6647c36f07dab004872c526da1c91c5ded99a2b2 | diff --git a/Mage/src/main/java/mage/cards/repository/CardRepository.java b/Mage/src/main/java/mage/cards/repository/CardRepository.java
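A minimal sketch of that fallback, kept generic on purpose (the class, method name and `Function`-based signature are made up for illustration and are not the `CardRepository` API); the actual diff below applies the same `split(" // ", 2)[0]` retry inside both lookup paths:
```java
import java.util.List;
import java.util.function.Function;

public class CardNameLookup {

    // Try the full name first; if nothing is found and the name looks like
    // "Left Name // Right Name", retry with the left half only, because flip
    // cards are stored under their left-side name.
    public static <T> List<T> findWithFlipFallback(String name, Function<String, List<T>> finder) {
        List<T> found = finder.apply(name);
        if (found.isEmpty() && name.contains(" // ")) {
            found = finder.apply(name.split(" // ", 2)[0]);
        }
        return found;
    }

    public static void main(String[] args) {
        // "Orochi Eggwatcher // Shidako, Broodmistress" -> "Orochi Eggwatcher"
        System.out.println("Orochi Eggwatcher // Shidako, Broodmistress".split(" // ", 2)[0]);
    }
}
```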
index 750a3be431..52b3cac6db 100644
--- a/Mage/src/main/java/mage/cards/repository/CardRepository.java
+++ b/Mage/src/main/java/mage/cards/repository/CardRepository.java
@@ -442,7 +442,6 @@ public enum CardRepository {
cards = findCards(name);
}
if (!cards.isEmpty()) {
- CardInfo cardToUse = null;
for (CardInfo cardinfo : cards) {
if (cardinfo.getSetCode() != null && expansion != null && expansion.equalsIgnoreCase(cardinfo.getSetCode())) {
return cardinfo;
@@ -470,9 +469,25 @@ public enum CardRepository {
if (limitByMaxAmount > 0) {
queryBuilder.limit(limitByMaxAmount);
}
- return cardDao.query(queryBuilder.prepare());
+
+ List<CardInfo> result = cardDao.query(queryBuilder.prepare());
+
+ // Got no results, could be because the name referred to a double-face cards (e.g. Malakir Rebirth // Malakir Mire)
+ if (result.isEmpty() && name.contains(" // ")) {
+ // If there IS a " // " then the card could be either a double-face card (e.g. Malakir Rebirth // Malakir Mire)
+ // OR a split card (e.g. Assault // Battery).
+ // Since you can't tell based on the name, we split the text based on " // " and try the operation again with
+ // the string on the left side of " // " (double-faced cards are stored under the name on the left of the " // ").
+ queryBuilder.where().eq("name", new SelectArg(name.split(" // ", 2)[0]));
+
+ result = cardDao.query(queryBuilder.prepare());
+ }
+
+ return result;
} catch (SQLException ex) {
+ Logger.getLogger(CardRepository.class).error("Error during execution of raw sql statement", ex);
}
+
return Collections.emptyList();
}
@@ -482,6 +497,7 @@ public enum CardRepository {
queryBuilder.where().eq("className", new SelectArg(canonicalClassName));
return cardDao.query(queryBuilder.prepare());
} catch (SQLException ex) {
+ Logger.getLogger(CardRepository.class).error("Error during execution of raw sql statement", ex);
}
return Collections.emptyList();
}
@@ -492,14 +508,29 @@ public enum CardRepository {
GenericRawResults<CardInfo> rawResults = cardDao.queryRaw(
"select * from " + CardRepository.VERSION_ENTITY_NAME + " where lower_name = '" + sqlName + '\\'',
cardDao.getRawRowMapper());
- List<CardInfo> result = new ArrayList<>();
- for (CardInfo cardinfo : rawResults) {
- result.add(cardinfo);
+
+ List<CardInfo> result = rawResults.getResults();
+
+ // Got no results, could be because the name referred to a double-face cards (e.g. Malakir Rebirth // Malakir Mire)
+ if (result.isEmpty() && sqlName.contains(" // ")) {
+ // If there IS a " // " then the card could be either a double-face card (e.g. Malakir Rebirth // Malakir Mire)
+ // OR a split card (e.g. Assault // Battery).
+ // Since you can't tell based on the name, we split the text based on " // " and try the operation again with
+ // the string on the left side of " // " (double-faced cards are stored under the name on the left of the " // ").
+ String leftCardName = sqlName.split(" // ", 2)[0];
+
+ GenericRawResults<CardInfo> rawResults2 = cardDao.queryRaw(
+ "select * from " + CardRepository.VERSION_ENTITY_NAME + " where lower_name = '" + leftCardName + '\\'',
+ cardDao.getRawRowMapper());
+
+ result = rawResults2.getResults();
}
+
return result;
} catch (SQLException ex) {
Logger.getLogger(CardRepository.class).error("Error during execution of raw sql statement", ex);
}
+
return Collections.emptyList();
}
| ['Mage/src/main/java/mage/cards/repository/CardRepository.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 68,881,883 | 15,538,402 | 1,907,039 | 26,758 | 2,489 | 536 | 41 | 1 | 390 | 60 | 105 | 11 | 0 | 0 | 2022-01-30T00:40:07 | 1,605 | Java | {'Java': 83753240, 'Perl': 96893, 'Batchfile': 17988, 'Python': 5771, 'Shell': 858} | MIT License |
146 | magefree/mage/8658/4198 | magefree | mage | https://github.com/magefree/mage/issues/4198 | https://github.com/magefree/mage/pull/8658 | https://github.com/magefree/mage/pull/8658 | 1 | fixes | Orcish Spy's ability reveals cards in incorrect order | The point of using Orcish Spy is to have an inferior but repeatable effect for anticipating your draws, however there is no way to tell which card is on which library spot from the revealed cards. The order appears to be random. | 149c79992629a292450f720a72a79cf6ddb27632 | 2996b0f76f7d4b65a1fe9aeb6935cfdcd2977209 | https://github.com/magefree/mage/compare/149c79992629a292450f720a72a79cf6ddb27632...2996b0f76f7d4b65a1fe9aeb6935cfdcd2977209 | diff --git a/Mage/src/main/java/mage/players/Library.java b/Mage/src/main/java/mage/players/Library.java
index e08aab6741..01c6e90f06 100644
--- a/Mage/src/main/java/mage/players/Library.java
+++ b/Mage/src/main/java/mage/players/Library.java
@@ -168,7 +168,7 @@ public class Library implements Serializable {
}
public Set<Card> getTopCards(Game game, int amount) {
- Set<Card> cards = new HashSet<>();
+ Set<Card> cards = new LinkedHashSet<>();
Iterator<UUID> it = library.iterator();
int count = 0;
while (it.hasNext() && count < amount) { | ['Mage/src/main/java/mage/players/Library.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 69,403,227 | 15,657,789 | 1,921,773 | 26,987 | 93 | 19 | 2 | 1 | 228 | 42 | 47 | 1 | 0 | 0 | 2022-02-06T15:20:57 | 1,605 | Java | {'Java': 83753240, 'Perl': 96893, 'Batchfile': 17988, 'Python': 5771, 'Shell': 858} | MIT License |
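An aside on the one-line fix above: `HashSet` makes no ordering guarantee, so the top cards of the library were revealed in an effectively arbitrary order, while `LinkedHashSet` iterates in insertion order. A tiny self-contained demonstration (not Mage code):
```java
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class RevealOrderDemo {
    public static void main(String[] args) {
        // Pretend these are the top cards of the library, in library order.
        List<String> topOfLibrary = List.of("Lightning Bolt", "Island", "Orcish Spy", "Swamp");

        Set<String> unordered = new HashSet<>(topOfLibrary);     // iteration order depends on hash codes
        Set<String> ordered = new LinkedHashSet<>(topOfLibrary); // iteration order = insertion order

        System.out.println(unordered); // order is not meaningful
        System.out.println(ordered);   // [Lightning Bolt, Island, Orcish Spy, Swamp]
    }
}
```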
1,336 | jooby-project/jooby/2524/2523 | jooby-project | jooby | https://github.com/jooby-project/jooby/issues/2523 | https://github.com/jooby-project/jooby/pull/2524 | https://github.com/jooby-project/jooby/pull/2524 | 1 | fixes | jooby-jackson 2.13.0 contains a breaking change when requiring ObjectMapper | ```java
package com.premiumminds.test;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.jooby.Jooby;
import io.jooby.json.JacksonModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class App extends Jooby
{
private static final Logger LOGGER = LoggerFactory.getLogger(App.class);
public static class MyObject {
private final String message;
public MyObject(final String message) {this.message = message;}
public String getMessage() {
return message;
}
}
{
install(new JacksonModule());
get("/", ctx -> {
MyObject myObject = new MyObject("Hello World");
final ObjectMapper mapper = require(ObjectMapper.class);
LOGGER.info(mapper.writeValueAsString(myObject));
return myObject;
});
}
public static void main(String[] args) {
runApp(args, App::new);
}
}
```
With jooby-jackson 2.12.0
```shell
$ curl -H 'Content-Type: application/json' http://localhost:8080/
{"message":"Hello World"}
```
With jooby-jackson 2.13.0
```shell
$ curl -H 'Content-Type: application/json' http://localhost:8080/
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<style>
body {font-family: "open sans",sans-serif; margin-left: 20px;}
h1 {font-weight: 300; line-height: 44px; margin: 25px 0 0 0;}
h2 {font-size: 16px;font-weight: 300; line-height: 44px; margin: 0;}
footer {font-weight: 300; line-height: 44px; margin-top: 10px;}
hr {background-color: #f7f7f9;}
div.trace {border:1px solid #e1e1e8; background-color: #f7f7f9;}
p {padding-left: 20px;}
p.tab {padding-left: 40px;}
</style>
<title>Server Error (500)</title>
<body>
<h1>Server Error</h1>
<hr>
<h2>message: Service not found: com.fasterxml.jackson.databind.ObjectMapper</h2>
<h2>status code: 500</h2>
</body>
</html>
```
This is related to this change https://github.com/jooby-project/jooby/commit/3189fd725b4b5fc58baa823d26667259ac361566#diff-3d2521b88af51d54e92d22f24cf540fe1f317562fd0847f7d3980b41a78da9bdR140
It was registering `ObjectMapper` but now it is registering the mapper class which is `JsonMapper` by default. | 1a0252d5dac7d3730f3e7017008c65a9ae4e5dd3 | 5ec36e8d925e9a45db002e8a63675ee7e56e1264 | https://github.com/jooby-project/jooby/compare/1a0252d5dac7d3730f3e7017008c65a9ae4e5dd3...5ec36e8d925e9a45db002e8a63675ee7e56e1264 | diff --git a/modules/jooby-jackson/src/main/java/io/jooby/json/JacksonModule.java b/modules/jooby-jackson/src/main/java/io/jooby/json/JacksonModule.java
index 42534163b..0a471050a 100644
--- a/modules/jooby-jackson/src/main/java/io/jooby/json/JacksonModule.java
+++ b/modules/jooby-jackson/src/main/java/io/jooby/json/JacksonModule.java
@@ -138,6 +138,7 @@ public class JacksonModule implements Extension, MessageDecoder, MessageEncoder
ServiceRegistry services = application.getServices();
Class mapperType = mapper.getClass();
services.put(mapperType, mapper);
+ services.put(ObjectMapper.class, mapper);
// Parsing exception as 400
application.errorCode(JsonParseException.class, StatusCode.BAD_REQUEST); | ['modules/jooby-jackson/src/main/java/io/jooby/json/JacksonModule.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,936,645 | 447,233 | 65,015 | 482 | 46 | 9 | 1 | 1 | 2,244 | 203 | 613 | 78 | 3 | 3 | 2022-02-08T12:51:21 | 1,585 | Java | {'Java': 3206985, 'Kotlin': 53463, 'PHP': 16878, 'Handlebars': 13400, 'HTML': 6645, 'Shell': 1325, 'Groovy': 943, 'Dockerfile': 424, 'FreeMarker': 108} | Apache License 2.0 |
1,337 | jooby-project/jooby/2370/2252 | jooby-project | jooby | https://github.com/jooby-project/jooby/issues/2252 | https://github.com/jooby-project/jooby/pull/2370 | https://github.com/jooby-project/jooby/pull/2370 | 1 | fixes | CWE-113 in 1.x? | At first I thought Jooby 1.x wasn't affected by [CWE-113](https://github.com/jooby-project/jooby/security/advisories/GHSA-gv3v-92v6-m48j). The `NettyHandler` instantiates `DefaultHttpHeaders` using their default constructor (i.e. `validate = true`). https://github.com/jooby-project/jooby/blob/246903dec03bd08fcbeac8bb369a8f342eb2901f/modules/jooby-netty/src/main/java/org/jooby/internal/netty/NettyHandler.java#L273
However, `NettyPush` uses it differently: https://github.com/jooby-project/jooby/blob/246903dec03bd08fcbeac8bb369a8f342eb2901f/modules/jooby-netty/src/main/java/org/jooby/internal/netty/NettyPush.java#L264 | 246903dec03bd08fcbeac8bb369a8f342eb2901f | 5cc23f38fb3f0a7313eb12705a5be0e95aab759f | https://github.com/jooby-project/jooby/compare/246903dec03bd08fcbeac8bb369a8f342eb2901f...5cc23f38fb3f0a7313eb12705a5be0e95aab759f | diff --git a/modules/jooby-netty/src/main/java/org/jooby/internal/netty/NettyPush.java b/modules/jooby-netty/src/main/java/org/jooby/internal/netty/NettyPush.java
index cf331ed7d..157f184a2 100644
--- a/modules/jooby-netty/src/main/java/org/jooby/internal/netty/NettyPush.java
+++ b/modules/jooby-netty/src/main/java/org/jooby/internal/netty/NettyPush.java
@@ -261,7 +261,7 @@ public class NettyPush implements NativePushPromise {
// TODO: Is there another way of handling a push promise?
DefaultFullHttpRequest pushRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.valueOf(method.toUpperCase()), path, Unpooled.EMPTY_BUFFER,
- new DefaultHttpHeaders(false).set(streamIdHeader, nextStreamId),
+ new DefaultHttpHeaders().set(streamIdHeader, nextStreamId),
EmptyHttpHeaders.INSTANCE);
ctx.pipeline().fireChannelRead(pushRequest);
ctx.pipeline().fireChannelReadComplete(); | ['modules/jooby-netty/src/main/java/org/jooby/internal/netty/NettyPush.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 8,079,522 | 1,746,747 | 180,044 | 495 | 146 | 31 | 2 | 1 | 625 | 29 | 201 | 3 | 3 | 0 | 2021-06-22T12:09:08 | 1,585 | Java | {'Java': 3206985, 'Kotlin': 53463, 'PHP': 16878, 'Handlebars': 13400, 'HTML': 6645, 'Shell': 1325, 'Groovy': 943, 'Dockerfile': 424, 'FreeMarker': 108} | Apache License 2.0 |
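An aside on the change above: the fix switches `NettyPush` from `new DefaultHttpHeaders(false)` to the validating default constructor. With validation disabled, a header value containing CR/LF is stored verbatim, which is the ingredient for HTTP response splitting (CWE-113). A minimal sketch follows; the exact exception type and validation rules depend on the Netty 4.1.x version in use:
```java
import io.netty.handler.codec.http.DefaultHttpHeaders;

public class HeaderValidationDemo {
    public static void main(String[] args) {
        String malicious = "ok\r\nSet-Cookie: injected=1";

        // validate = false: the CR/LF value is accepted verbatim.
        new DefaultHttpHeaders(false).set("X-Demo", malicious);
        System.out.println("non-validating headers accepted the CR/LF value");

        // validate = true (what the default constructor uses): the same value is rejected.
        try {
            new DefaultHttpHeaders(true).set("X-Demo", malicious);
        } catch (Exception expected) {
            System.out.println("validating headers rejected it: " + expected.getMessage());
        }
    }
}
```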
1,339 | jooby-project/jooby/2171/2167 | jooby-project | jooby | https://github.com/jooby-project/jooby/issues/2167 | https://github.com/jooby-project/jooby/pull/2171 | https://github.com/jooby-project/jooby/pull/2171 | 1 | fixes | Memory leak in the jooby-netty implementation | In our project we use Jooby, and have more recently upgraded to version 2, currently running on the latest version 2.9.4.
I've spent a couple of days hunting down a memory leak within our application and have found out that the class:
`io.netty.handler.codec.DefaultHeaders$HeaderEntry` is causing the issue.
**How to recreate:**
Create an empty project with the latest version of Jooby + Netty and Gradle. Send a POST request to the running server and observe that instances of the above-mentioned class increase with every call (which is normal), but GC only manages to reclaim about 60% of them.
I have also created a project with Ktor + Netty, but this does NOT happen there, GC manages to clear what it should.
I have not looked into the implementation here, but hopefully you might know where to look?
It does seem to apply only to POST requests, and it is quite hard to see; we only found it because we did not deploy for 4-5 days straight.
We have swapped to Jetty to test, and cannot see any memory leakage like this.
Please let me know if more information is required to solve this issue
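For context on where such a leak can come from, here is an illustrative sketch (not the Jooby code, but it mirrors the fix that follows): Netty's `HttpDataFactory` remembers every `HttpData` it creates for a request, so that data has to be cleaned up per request or the factory keeps it reachable long after the response has been sent.
```java
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.multipart.HttpData;
import io.netty.handler.codec.http.multipart.HttpDataFactory;

public class BodyAttribute implements AutoCloseable {
    private final HttpDataFactory factory;
    private final HttpRequest request;
    private final HttpData body;

    public BodyAttribute(HttpDataFactory factory, HttpRequest request) {
        this.factory = factory;
        this.request = request;
        // The factory tracks this attribute internally, keyed by the request.
        this.body = factory.createAttribute(request, "body");
    }

    public HttpData get() {
        return body;
    }

    @Override
    public void close() {
        // Without this cleanup the factory keeps referencing the data after the
        // request completes, and the references accumulate across requests.
        factory.cleanRequestHttpData(request);
        factory.removeHttpDataFromClean(request, body);
        body.delete();
    }
}
```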
| e761210942bc24d810e383820d1e577fed15c1fb | 870cc1cb947d07c19f37d520c87ceb83b046a810 | https://github.com/jooby-project/jooby/compare/e761210942bc24d810e383820d1e577fed15c1fb...870cc1cb947d07c19f37d520c87ceb83b046a810 | diff --git a/modules/jooby-netty/src/main/java/io/jooby/internal/netty/HttpRawPostRequestDecoder.java b/modules/jooby-netty/src/main/java/io/jooby/internal/netty/HttpRawPostRequestDecoder.java
index 0834c7396..3df896914 100644
--- a/modules/jooby-netty/src/main/java/io/jooby/internal/netty/HttpRawPostRequestDecoder.java
+++ b/modules/jooby-netty/src/main/java/io/jooby/internal/netty/HttpRawPostRequestDecoder.java
@@ -6,8 +6,10 @@
package io.jooby.internal.netty;
import io.netty.handler.codec.http.HttpContent;
+import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.codec.http.multipart.HttpData;
+import io.netty.handler.codec.http.multipart.HttpDataFactory;
import io.netty.handler.codec.http.multipart.HttpPostRequestDecoder;
import io.netty.handler.codec.http.multipart.InterfaceHttpData;
import io.netty.handler.codec.http.multipart.InterfaceHttpPostRequestDecoder;
@@ -18,10 +20,15 @@ import java.util.List;
public class HttpRawPostRequestDecoder implements InterfaceHttpPostRequestDecoder {
+ private HttpRequest request;
+ private HttpDataFactory factory;
+
private HttpData data;
- public HttpRawPostRequestDecoder(HttpData data) {
- this.data = data;
+ public HttpRawPostRequestDecoder(HttpDataFactory factory, HttpRequest request) {
+ this.factory = factory;
+ this.request = request;
+ this.data = factory.createAttribute(request, "body");
}
@Override public boolean isMultipart() {
@@ -69,12 +76,16 @@ public class HttpRawPostRequestDecoder implements InterfaceHttpPostRequestDecode
}
@Override public void destroy() {
+ cleanFiles();
+ removeHttpDataFromClean(data);
data.delete();
}
@Override public void cleanFiles() {
+ factory.cleanRequestHttpData(request);
}
@Override public void removeHttpDataFromClean(InterfaceHttpData data) {
+ factory.removeHttpDataFromClean(request, data);
}
}
diff --git a/modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyHandler.java b/modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyHandler.java
index 82d180bd4..5e0922bf6 100644
--- a/modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyHandler.java
+++ b/modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyHandler.java
@@ -202,7 +202,7 @@ public class NettyHandler extends ChannelInboundHandlerAdapter {
return new HttpPostStandardRequestDecoder(factory, request, StandardCharsets.UTF_8);
}
}
- return new HttpRawPostRequestDecoder(factory.createAttribute(request, "body"));
+ return new HttpRawPostRequestDecoder(factory, request);
}
static String pathOnly(String uri) { | ['modules/jooby-netty/src/main/java/io/jooby/internal/netty/HttpRawPostRequestDecoder.java', 'modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyHandler.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,885,506 | 435,818 | 63,375 | 467 | 755 | 149 | 17 | 2 | 1,094 | 188 | 244 | 17 | 0 | 0 | 2020-12-08T19:05:27 | 1,585 | Java | {'Java': 3206985, 'Kotlin': 53463, 'PHP': 16878, 'Handlebars': 13400, 'HTML': 6645, 'Shell': 1325, 'Groovy': 943, 'Dockerfile': 424, 'FreeMarker': 108} | Apache License 2.0 |
1,338 | jooby-project/jooby/2220/2210 | jooby-project | jooby | https://github.com/jooby-project/jooby/issues/2210 | https://github.com/jooby-project/jooby/pull/2220 | https://github.com/jooby-project/jooby/pull/2220 | 1 | fixes | Websockets onClose not working | Hi!
For some weird reason, the onClose event is not called on my WebSockets when setting a handler using the configurator.
Regards
Dominik | f7bd9284d48b589086e30339c9f174e15f662ba5 | e105611c98986f6e9f9eaaa72e0efc43a1bfc247 | https://github.com/jooby-project/jooby/compare/f7bd9284d48b589086e30339c9f174e15f662ba5...e105611c98986f6e9f9eaaa72e0efc43a1bfc247 | diff --git a/jooby/src/main/java/io/jooby/Server.java b/jooby/src/main/java/io/jooby/Server.java
index e0197145c..8868970aa 100644
--- a/jooby/src/main/java/io/jooby/Server.java
+++ b/jooby/src/main/java/io/jooby/Server.java
@@ -139,8 +139,10 @@ public interface Server {
String message = cause.getMessage();
if (message != null) {
String msg = message.toLowerCase();
- return msg.contains("reset by peer") || msg.contains("broken pipe") || msg
- .contains("forcibly closed");
+ return msg.contains("reset by peer")
+ || msg.contains("broken pipe")
+ || msg.contains("forcibly closed")
+ || msg.contains("connection reset");
}
}
return (cause instanceof ClosedChannelException) || (cause instanceof EOFException);
diff --git a/modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyWebSocket.java b/modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyWebSocket.java
index fe1d2dfda..434461504 100644
--- a/modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyWebSocket.java
+++ b/modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyWebSocket.java
@@ -5,6 +5,17 @@
*/
package io.jooby.internal.netty;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
+
+import javax.annotation.Nonnull;
+
import io.jooby.Context;
import io.jooby.Router;
import io.jooby.Server;
@@ -24,16 +35,6 @@ import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketFrame;
import io.netty.util.AttributeKey;
-import javax.annotation.Nonnull;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.CountDownLatch;
-
public class NettyWebSocket implements WebSocketConfigurer, WebSocket, ChannelFutureListener {
/** All connected websocket. */
private static final ConcurrentMap<String, List<NettyWebSocket>> all = new ConcurrentHashMap<>();
@@ -155,20 +156,22 @@ public class NettyWebSocket implements WebSocketConfigurer, WebSocket, ChannelFu
private void handleMessage(WebSocketFrame frame) {
try {
- if (frame.isFinalFragment()) {
- ByteBuf content;
- if (buffer != null) {
- buffer.writeBytes(frame.content());
- content = buffer;
- buffer = null;
+ if (messageCallback != null) {
+ if (frame.isFinalFragment()) {
+ ByteBuf content;
+ if (buffer != null) {
+ buffer.writeBytes(frame.content());
+ content = buffer;
+ buffer = null;
+ } else {
+ content = frame.content();
+ }
+ WebSocketMessage message = WebSocketMessage.create(getContext(), array(content));
+
+ fireCallback(webSocketTask(() -> messageCallback.onMessage(this, message), false));
} else {
- content = frame.content();
+ buffer = Unpooled.copiedBuffer(frame.content());
}
- WebSocketMessage message = WebSocketMessage.create(getContext(), array(content));
-
- fireCallback(webSocketTask(() -> messageCallback.onMessage(this, message), false));
- } else {
- buffer = Unpooled.copiedBuffer(frame.content());
}
} finally {
frame.release();
diff --git a/modules/jooby-utow/src/main/java/io/jooby/internal/utow/UtowWebSocket.java b/modules/jooby-utow/src/main/java/io/jooby/internal/utow/UtowWebSocket.java
index db5f5c60d..85d3aec73 100644
--- a/modules/jooby-utow/src/main/java/io/jooby/internal/utow/UtowWebSocket.java
+++ b/modules/jooby-utow/src/main/java/io/jooby/internal/utow/UtowWebSocket.java
@@ -198,12 +198,19 @@ public class UtowWebSocket extends AbstractReceiveListener
@Override protected void onError(WebSocketChannel channel, Throwable x) {
// should close?
if (Server.connectionLost(x) || SneakyThrows.isFatal(x)) {
- handleClose(WebSocketCloseStatus.SERVER_ERROR);
+ if (channel.isOpen()) {
+ handleClose(WebSocketCloseStatus.SERVER_ERROR);
+ }
}
if (onErrorCallback == null) {
- ctx.getRouter().getLog()
- .error("Websocket resulted in exception: {}", ctx.getRequestPath(), x);
+ if (Server.connectionLost(x)) {
+ ctx.getRouter().getLog()
+ .debug("Websocket connection lost: {}", ctx.getRequestPath(), x);
+ } else {
+ ctx.getRouter().getLog()
+ .error("Websocket resulted in exception: {}", ctx.getRequestPath(), x);
+ }
} else {
onErrorCallback.onError(this, x);
}
@@ -215,8 +222,10 @@ public class UtowWebSocket extends AbstractReceiveListener
@Override protected void onCloseMessage(CloseMessage cm,
WebSocketChannel channel) {
- handleClose(WebSocketCloseStatus.valueOf(cm.getCode())
- .orElseGet(() -> new WebSocketCloseStatus(cm.getCode(), cm.getReason())));
+ if (channel.isOpen()) {
+ handleClose(WebSocketCloseStatus.valueOf(cm.getCode())
+ .orElseGet(() -> new WebSocketCloseStatus(cm.getCode(), cm.getReason())));
+ }
}
private void handleClose(WebSocketCloseStatus closeStatus) { | ['modules/jooby-utow/src/main/java/io/jooby/internal/utow/UtowWebSocket.java', 'jooby/src/main/java/io/jooby/Server.java', 'modules/jooby-netty/src/main/java/io/jooby/internal/netty/NettyWebSocket.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 1,905,746 | 440,530 | 64,015 | 475 | 2,972 | 557 | 72 | 3 | 145 | 23 | 32 | 6 | 0 | 0 | 2021-01-03T22:44:22 | 1,585 | Java | {'Java': 3206985, 'Kotlin': 53463, 'PHP': 16878, 'Handlebars': 13400, 'HTML': 6645, 'Shell': 1325, 'Groovy': 943, 'Dockerfile': 424, 'FreeMarker': 108} | Apache License 2.0 |
1,000 | zalando/logbook/654/653 | zalando | logbook | https://github.com/zalando/logbook/issues/653 | https://github.com/zalando/logbook/pull/654 | https://github.com/zalando/logbook/pull/654 | 2 | fixes | All null JSON fields get masked | If some field is `null`, it gets replaced by `XXX` by default. I couldn't find any way to change it other than copy-pasting and tweaking `JsonBodyFilters`.
```
public static void main(String[] args) {
val result = BodyFilters.defaultValue().filter(MediaType.APPLICATION_JSON_UTF8_VALUE, "{\\n" +
" \\"myRandomField\\": null\\n" +
"}");
System.out.println(result);
}
```
Prints this:
`{"myRandomField":"XXX"}` | 25a52ee8ff71e1bbeeafcef9d12ad8626e842c96 | 1943c06b122ee0fce09bb27ef8daba6366c5be79 | https://github.com/zalando/logbook/compare/25a52ee8ff71e1bbeeafcef9d12ad8626e842c96...1943c06b122ee0fce09bb27ef8daba6366c5be79 | diff --git a/logbook-json/src/main/java/org/zalando/logbook/json/JsonBodyFilters.java b/logbook-json/src/main/java/org/zalando/logbook/json/JsonBodyFilters.java
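The root cause is regex alternation precedence: without grouping, `prefix value|null` parses as `(prefix value) | (null)`, so a bare `null` anywhere in the document matches on its own. A standalone demonstration with simplified patterns (not the exact ones built by `JsonBodyFilters`):
```java
import java.util.regex.Pattern;

public class AlternationPrecedenceDemo {
    public static void main(String[] args) {
        String json = "{\"foo\":null,\"bar\":null}";

        // Broken: the alternation is not grouped, so "null" matches on its own anywhere.
        Pattern broken = Pattern.compile("(\"foo\"\\s*:\\s*)\"[^\"]*\"|null");
        System.out.println(broken.matcher(json).replaceAll("$1\"XXX\""));
        // -> {"foo":"XXX","bar":"XXX"}   (bar gets masked too)

        // Fixed: group the value alternatives so null only matches right after the "foo" key.
        Pattern fixed = Pattern.compile("(\"foo\"\\s*:\\s*)(\"[^\"]*\"|null)");
        System.out.println(fixed.matcher(json).replaceAll("$1\"XXX\""));
        // -> {"foo":"XXX","bar":null}
    }
}
```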
index a9f32ad0..0e951e14 100644
--- a/logbook-json/src/main/java/org/zalando/logbook/json/JsonBodyFilters.java
+++ b/logbook-json/src/main/java/org/zalando/logbook/json/JsonBodyFilters.java
@@ -27,7 +27,6 @@ public final class JsonBodyFilters {
return replaceJsonStringProperty(properties, "XXX");
}
-
/**
* Creates a {@link BodyFilter} that replaces the properties in the json response with the replacement passed as argument.
* This {@link BodyFilter} works on all levels inside the json tree and it only works with string values<br><br>
@@ -61,7 +60,7 @@ public final class JsonBodyFilters {
private static BodyFilter replacePrimitiveJsonProperty(final Set<String> properties, final String value, final String replacement) {
final String regex = properties.stream().map(Pattern::quote).collect(joining("|"));
final String property = "\\"(?:" + regex + ")\\"";
- final Pattern pattern = compile("(" + property + "\\\\s*:\\\\s*)" + value + "|null");
+ final Pattern pattern = compile("(" + property + "\\\\s*:\\\\s*)(" + value + "|null)");
final UnaryOperator<String> delegate = body -> pattern.matcher(body).replaceAll("$1" + replacement);
return (contentType, body) ->
diff --git a/logbook-json/src/test/java/org/zalando/logbook/json/JsonBodyFiltersTest.java b/logbook-json/src/test/java/org/zalando/logbook/json/JsonBodyFiltersTest.java
index 2abb91b9..df0e93f6 100644
--- a/logbook-json/src/test/java/org/zalando/logbook/json/JsonBodyFiltersTest.java
+++ b/logbook-json/src/test/java/org/zalando/logbook/json/JsonBodyFiltersTest.java
@@ -93,6 +93,15 @@ class JsonBodyFiltersTest {
assertThat(actual, is("{\\"foo\\":\\"XXX\\",\\"bar\\":\\"public\\"}"));
}
+ @Test
+ void shouldFilterMatchingNullPropertyOnly() {
+ final BodyFilter unit = replaceJsonStringProperty(singleton("foo"), "XXX");
+
+ final String actual = unit.filter(contentType, "{\\"foo\\":null,\\"bar\\":null}");
+
+ assertThat(actual, is("{\\"foo\\":\\"XXX\\",\\"bar\\":null}"));
+ }
+
@Test
void shouldFilterAccessTokens() {
final BodyFilter unit = new AccessTokenBodyFilter(); | ['logbook-json/src/test/java/org/zalando/logbook/json/JsonBodyFiltersTest.java', 'logbook-json/src/main/java/org/zalando/logbook/json/JsonBodyFilters.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 254,442 | 50,270 | 8,729 | 139 | 185 | 46 | 3 | 1 | 449 | 50 | 108 | 12 | 0 | 1 | 2019-12-02T19:52:24 | 1,553 | Java | {'Java': 1005854, 'Kotlin': 36603, 'Shell': 1136} | MIT License |
997 | zalando/logbook/798/797 | zalando | logbook | https://github.com/zalando/logbook/issues/797 | https://github.com/zalando/logbook/pull/798 | https://github.com/zalando/logbook/pull/798 | 1 | fixes | Missing setter for FormRequestMode in compiled/released JAR | ## Description
I upgraded the dependency logbook-spring-boot-starter to the latest version (2.1.2). When I wanted to set the formRequestMode via `logbook.filter.form-request-mode`, I was greeted with the following message on startup:
```
***************************
APPLICATION FAILED TO START
***************************
Description:
Failed to bind properties under 'logbook.filter' to org.zalando.logbook.autoconfigure.LogbookProperties$Filter:
Property: logbook.filter.form-request-mode
Value: parameter
Origin: class path resource [application.yml]:76:29
Reason: No setter found for property: form-request-mode
```
I was a little confused since the code contains a @Setter lombok annotation. But checking the decompiled class code shows the following:
```
public final class LogbookProperties {
[...]
public static class Filter {
private final FormRequestMode formRequestMode = FormRequestMode.fromProperties();
public Filter() {
}
@Generated
public FormRequestMode getFormRequestMode() {
return this.formRequestMode;
}
}
}
```
Turns out, the setter is actually missing.
## Expected Behavior
* Lombok setter annotation is processed correctly
* configuration works as intended
## Actual Behavior
* configuring the form-request-mode results in the application not starting up.
## Possible Fix
Remove final from the respective variable.
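For reference, a tiny generic sketch of the Lombok behaviour behind this (not the Logbook class): a class-level `@Setter` silently skips `final` fields, so only the getter ends up in the compiled class, exactly as the decompiled output above shows.
```java
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
class BindingTarget {
    // final: Lombok generates getSkipped() but no setter, so property binding
    // onto this field has nothing to call and fails at startup.
    private final String skipped = "no setter generated";

    // non-final: Lombok generates both getBound() and setBound(String).
    private String bound = "setter generated";
}
```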
## Steps to Reproduce
1. Use dependency of logbook-spring-boot-starter in version 2.1.2
2. Configure logbook.filter.form-request-mode
3. Try to start spring boot application.
4. Application does not start.
## Your Environment
* Version used: 2.1.2
| 97310e4f4b91a3481e49c15693d6e6ae1f131765 | d0b995689f01a5efccb04d91f985311964e341aa | https://github.com/zalando/logbook/compare/97310e4f4b91a3481e49c15693d6e6ae1f131765...d0b995689f01a5efccb04d91f985311964e341aa | diff --git a/logbook-spring-boot-autoconfigure/src/main/java/org/zalando/logbook/autoconfigure/LogbookProperties.java b/logbook-spring-boot-autoconfigure/src/main/java/org/zalando/logbook/autoconfigure/LogbookProperties.java
index 4afb650f..0ec93e0b 100644
--- a/logbook-spring-boot-autoconfigure/src/main/java/org/zalando/logbook/autoconfigure/LogbookProperties.java
+++ b/logbook-spring-boot-autoconfigure/src/main/java/org/zalando/logbook/autoconfigure/LogbookProperties.java
@@ -39,7 +39,7 @@ public final class LogbookProperties {
@Getter
@Setter
public static class Filter {
- private final FormRequestMode formRequestMode = FormRequestMode.fromProperties();
+ private FormRequestMode formRequestMode = FormRequestMode.fromProperties();
}
} | ['logbook-spring-boot-autoconfigure/src/main/java/org/zalando/logbook/autoconfigure/LogbookProperties.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 293,350 | 57,691 | 10,170 | 163 | 175 | 31 | 2 | 1 | 1,747 | 197 | 358 | 61 | 0 | 2 | 2020-08-03T15:16:02 | 1,553 | Java | {'Java': 1005854, 'Kotlin': 36603, 'Shell': 1136} | MIT License |
996 | zalando/logbook/846/842 | zalando | logbook | https://github.com/zalando/logbook/issues/842 | https://github.com/zalando/logbook/pull/846 | https://github.com/zalando/logbook/pull/846 | 1 | fixes | ChunkingSink does not write Requests/Responses without body | ## Detailed Description
ChunkingSink should write requests/responses without a body. It should use the delegate sink directly as it only chunks the bodies.
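Put differently, a body-chunking wrapper still has to emit exactly one (empty) chunk for a message without a body, otherwise the delegate is never invoked at all. A minimal sketch of that contract (illustrative only, not the Logbook implementation):
```java
import java.util.stream.Stream;

public class ChunkingDemo {

    static Stream<String> chunk(String body, int maxChunkSize) {
        if (body.isEmpty()) {
            // Still emit a single empty chunk so the delegate sink gets called.
            return Stream.of("");
        }
        Stream.Builder<String> chunks = Stream.builder();
        for (int i = 0; i < body.length(); i += maxChunkSize) {
            chunks.add(body.substring(i, Math.min(body.length(), i + maxChunkSize)));
        }
        return chunks.build();
    }

    public static void main(String[] args) {
        System.out.println(chunk("", 20).count());                      // 1 -> one write for a body-less message
        System.out.println(chunk("HelloWorldHelloWorld!", 20).count()); // 2
    }
}
```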
## Your Environment
* Version used: 2.1.0
## Reproducing
```java
// create a sink which prints to console
var sink = new ChunkingSink(new DefaultSink(new DefaultHttpLogFormatter(), new HttpLogWriter() {
@Override
public void write(Precorrelation precorrelation, String request) throws IOException {
System.out.println(request);
}
@Override
public void write(Correlation correlation, String response) throws IOException {
System.out.println(response);
}
}), 100);
// just use whatever does not matter for this test
var corr = new Correlation() {
@Override
public String getId() {
return UUID.randomUUID().toString();
}
@Override
public Instant getStart() {
return Instant.now();
}
@Override
public Instant getEnd() {
return Instant.now();
}
@Override
public Duration getDuration() {
return Duration.ofSeconds(5);
}
};
// will be printed
sink.write(corr, MockHttpRequest.create().withBodyAsString("bodyAsString"));
sink.write(corr, MockHttpRequest.create().withBodyAsString("bodyAsString"), MockHttpResponse.create().withBodyAsString("bodyAsString"));
// will not be printed
sink.write(corr, MockHttpRequest.create());
sink.write(corr, MockHttpRequest.create(), MockHttpResponse.create());
``` | 7b871333107b09b5262172a94d97810a6ccd36d1 | 71e4be571f388dfee86deac463339a50e8877efe | https://github.com/zalando/logbook/compare/7b871333107b09b5262172a94d97810a6ccd36d1...71e4be571f388dfee86deac463339a50e8877efe | diff --git a/logbook-core/src/main/java/org/zalando/logbook/ChunkingSink.java b/logbook-core/src/main/java/org/zalando/logbook/ChunkingSink.java
index b5676309..617786f2 100644
--- a/logbook-core/src/main/java/org/zalando/logbook/ChunkingSink.java
+++ b/logbook-core/src/main/java/org/zalando/logbook/ChunkingSink.java
@@ -48,6 +48,10 @@ public final class ChunkingSink implements Sink {
}
private Stream<String> chunk(final HttpMessage message) throws IOException {
+ if (message.getBodyAsString().isEmpty()) {
+ return Stream.of("");
+ }
+
return stream(new ChunkingSpliterator(message.getBodyAsString(), minChunkSize, maxChunkSize), false);
}
diff --git a/logbook-core/src/test/java/org/zalando/logbook/ChunkingSinkTest.java b/logbook-core/src/test/java/org/zalando/logbook/ChunkingSinkTest.java
index 13bfe06d..76ea5167 100644
--- a/logbook-core/src/test/java/org/zalando/logbook/ChunkingSinkTest.java
+++ b/logbook-core/src/test/java/org/zalando/logbook/ChunkingSinkTest.java
@@ -29,10 +29,19 @@ final class ChunkingSinkTest {
private final Sink unit = new ChunkingSink(delegate, 20);
@Test
- void shouldDelegateActive() {
+ void delegatesActive() {
assertThat(unit.isActive(), is(false));
}
+ @Test
+ void ignoresEmptyBodies() throws IOException {
+ final List<String> strings = captureRequest("");
+
+ assertThat(strings, contains(
+ allOf(startsWith("Incoming Request"))
+ ));
+ }
+
@Test
void shouldWriteSingleRequestIfLengthNotExceeded() throws IOException {
final List<String> chunks = captureRequest("HelloWorld"); | ['logbook-core/src/test/java/org/zalando/logbook/ChunkingSinkTest.java', 'logbook-core/src/main/java/org/zalando/logbook/ChunkingSink.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 293,514 | 57,725 | 10,176 | 163 | 99 | 17 | 4 | 1 | 1,457 | 146 | 306 | 60 | 0 | 1 | 2020-09-29T12:15:57 | 1,553 | Java | {'Java': 1005854, 'Kotlin': 36603, 'Shell': 1136} | MIT License |
1,002 | zalando/logbook/617/603 | zalando | logbook | https://github.com/zalando/logbook/issues/603 | https://github.com/zalando/logbook/pull/617 | https://github.com/zalando/logbook/pull/617 | 2 | fixes | Logbook removes body from response | I encountered an issue in combination with CXF and Spring Security where the body of a response vanishes (=is null) when using Logbook.
## Description
In Logbook version 1.13.0 the body of the response vanishes when the Writer Level is set to something that would lead to requests not getting logged (=INFO).
Using Logbook version 2.0.0-RC.33, the behaviour is inverted, e.g. the body disappears when the level would actually log it (=TRACE).
The Logbook Writer Level setting influences this behaviour.
The setup is very simple:
The Spring Boot App exposes a CXF service list under /services. This resource is accessible for everyone (permitAll).
GET requests to /services do not yield any body when logging is turned _off_ by changing the log level of Logbook. When I turn the logging _on_, the body is contained in the response as it would be without Logbook.
## Expected Behavior
The response should contain the original body.
## Actual Behavior
The body of the response is null.
## Possible Fix
I have no idea, sadly.
## Steps to Reproduce
Reproduce erroneous behaviour:
1. Check out https://github.com/Breeki32/logbooktest113
2. Start app and send GET request to /services
or simply run the tests
Reproduce correct behaviour:
1. Check out https://github.com/Breeki32/logbooktest113
2. Set the logging.level.org.zalando property to TRACE
3. Start app and send GET request to /services
or simply run the tests
Reproduce erroneous behaviour with **2.0.0-RC.3**:
1. Check out https://github.com/Breeki32/logbooktest113
2. Set the Logbook version to 2.0.0-RC.3 in pom.xml
3. Set the logging.level.org.zalando property to TRACE
or simply run the tests with the given version
## Context
I've noticed that certain things did not work in a web application when the log level was set to INFO. I tracked it down to the combination of CXF, Spring Security and Logbook (the minimal setup in the example project). The use case was logging the traffic to and from SOAP services for development purposes - but I noticed missing data in the service list servlet.
## Your Environment
* Version used: Logbook 1.13.0 - 2.0.0-RC3
* Spring Boot 2.1.6 - 2.1.8
* CXF 3.3.2 - 3.3.3
* Link to your project: https://github.com/Breeki32/logbooktest113
Thank you for looking into this! | 0c209bdf33c0399919b8d896845fc565d7f5d291 | 7a2150fa596525b9dfc5fc85fbfd78dea5d9a8a1 | https://github.com/zalando/logbook/compare/0c209bdf33c0399919b8d896845fc565d7f5d291...7a2150fa596525b9dfc5fc85fbfd78dea5d9a8a1 | diff --git a/logbook-servlet/src/main/java/org/zalando/logbook/servlet/LocalResponse.java b/logbook-servlet/src/main/java/org/zalando/logbook/servlet/LocalResponse.java
index 6e02255e..fe6ce083 100644
--- a/logbook-servlet/src/main/java/org/zalando/logbook/servlet/LocalResponse.java
+++ b/logbook-servlet/src/main/java/org/zalando/logbook/servlet/LocalResponse.java
@@ -106,6 +106,14 @@ final class LocalResponse extends HttpServletResponseWrapper implements HttpResp
}
}
+ @Override
+ public void flushBuffer() throws IOException {
+ if (buffer != null) {
+ buffer.flush();
+ }
+ super.flushBuffer();
+ }
+
@Override
public byte[] getBody() {
return body == null ? new byte[0] : body.getBytes();
@@ -135,6 +143,14 @@ final class LocalResponse extends HttpServletResponseWrapper implements HttpResp
return writer;
}
+ void flush() throws IOException {
+ if (writer == null) {
+ output.flush();
+ } else {
+ writer.flush();
+ }
+ }
+
byte[] getBytes() {
if (bytes == null) {
bytes = branch.toByteArray();
diff --git a/logbook-servlet/src/main/java/org/zalando/logbook/servlet/LogbookFilter.java b/logbook-servlet/src/main/java/org/zalando/logbook/servlet/LogbookFilter.java
index 1e8471cd..916f3120 100644
--- a/logbook-servlet/src/main/java/org/zalando/logbook/servlet/LogbookFilter.java
+++ b/logbook-servlet/src/main/java/org/zalando/logbook/servlet/LogbookFilter.java
@@ -46,7 +46,7 @@ public final class LogbookFilter implements HttpFilter {
final RemoteRequest request = new RemoteRequest(httpRequest);
final LocalResponse response = new LocalResponse(httpResponse, request.getProtocolVersion());
- final ResponseWritingStage stage = logRequest(request, request).process(response);
+ final ResponseWritingStage stage = logRequest(request).process(response);
chain.doFilter(request, response);
@@ -58,14 +58,12 @@ public final class LogbookFilter implements HttpFilter {
stage.write();
}
- private ResponseProcessingStage logRequest(final HttpServletRequest httpRequest,
- final HttpRequest request) throws IOException {
-
- if (httpRequest.getDispatcherType() == DispatcherType.ASYNC) {
- return (ResponseProcessingStage) httpRequest.getAttribute(STAGE);
+ private ResponseProcessingStage logRequest(final RemoteRequest request) throws IOException {
+ if (request.getDispatcherType() == DispatcherType.ASYNC) {
+ return (ResponseProcessingStage) request.getAttribute(STAGE);
} else {
final ResponseProcessingStage stage = process(request).write();
- httpRequest.setAttribute(STAGE, stage);
+ request.setAttribute(STAGE, stage);
return stage;
}
}
diff --git a/logbook-servlet/src/test/java/org/zalando/logbook/servlet/ExampleController.java b/logbook-servlet/src/test/java/org/zalando/logbook/servlet/ExampleController.java
index 032b526f..6a3dbf8f 100644
--- a/logbook-servlet/src/test/java/org/zalando/logbook/servlet/ExampleController.java
+++ b/logbook-servlet/src/test/java/org/zalando/logbook/servlet/ExampleController.java
@@ -13,7 +13,6 @@ import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
-import java.io.PrintWriter;
import java.nio.CharBuffer;
import java.util.Objects;
import java.util.concurrent.Callable;
@@ -95,9 +94,7 @@ public class ExampleController {
@RequestMapping(path = "/reader", produces = TEXT_PLAIN_VALUE)
public void reader(final HttpServletRequest request, final HttpServletResponse response) throws IOException {
- try (final PrintWriter writer = response.getWriter()) {
- copy(request.getReader(), writer);
- }
+ copy(request.getReader(), response.getWriter());
}
@RequestMapping(path = "/binary", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE)
diff --git a/logbook-servlet/src/test/java/org/zalando/logbook/servlet/LocalResponseTest.java b/logbook-servlet/src/test/java/org/zalando/logbook/servlet/LocalResponseTest.java
index b94e6c62..366089d2 100644
--- a/logbook-servlet/src/test/java/org/zalando/logbook/servlet/LocalResponseTest.java
+++ b/logbook-servlet/src/test/java/org/zalando/logbook/servlet/LocalResponseTest.java
@@ -94,6 +94,12 @@ class LocalResponseTest {
verifyNoMoreInteractions(mock);
}
+ @Test
+ void shouldDelegateClose() throws IOException {
+ unit.withBody();
+ unit.getOutputStream().close();
+ }
+
@Test
void shouldTeeGetWriter() throws IOException {
unit.withBody(); | ['logbook-servlet/src/main/java/org/zalando/logbook/servlet/LocalResponse.java', 'logbook-servlet/src/main/java/org/zalando/logbook/servlet/LogbookFilter.java', 'logbook-servlet/src/test/java/org/zalando/logbook/servlet/ExampleController.java', 'logbook-servlet/src/test/java/org/zalando/logbook/servlet/LocalResponseTest.java'] | {'.java': 4} | 4 | 4 | 0 | 0 | 4 | 231,247 | 46,285 | 7,674 | 139 | 1,187 | 200 | 28 | 2 | 2,317 | 358 | 576 | 53 | 4 | 0 | 2019-10-09T19:33:42 | 1,553 | Java | {'Java': 1005854, 'Kotlin': 36603, 'Shell': 1136} | MIT License |
304 | j-easy/easy-random/95/84 | j-easy | easy-random | https://github.com/j-easy/easy-random/issues/84 | https://github.com/j-easy/easy-random/pull/95 | https://github.com/j-easy/easy-random/pull/95 | 1 | fixes | "Pre-initialized" Bean Fields are nulled | When writing a dto, I often initialize collections to ensure they are never null. It appears that objenesis's approach to instantiation ends up producing an instance with null fields. Here's some code to show it. The first assert will pass, and the second will fail. Perhaps there is a perspective where this is desired behavior, but it was surprising to me, and not what I would prefer, so I thought I would share.
```java
public class Main {
    public static void main(String[] args) {
        EnhancedRandomBuilder builder = new EnhancedRandomBuilder();
        builder.exclude(new FieldDefinition<>("myList", List.class, MyBean.class));

        assert new MyBean().getMyList().size() == 0;
        assert builder.build().nextObject(MyBean.class).getMyList().size() == 0;
    }

    public static class MyBean {
        private List<String> myList = new ArrayList<>();

        public List<String> getMyList() {
            return myList;
        }

        public void setMyList(List<String> myList) {
            this.myList = myList;
        }
    }
}
```
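The underlying reason is how the instance gets created: Objenesis deliberately bypasses constructors, so inline field initializers never run, whereas constructor-based instantiation runs them. A small standalone comparison (illustrative only; the fix below makes the library try the default constructor first and fall back to Objenesis):
```java
import org.objenesis.ObjenesisStd;

import java.util.ArrayList;
import java.util.List;

public class InstantiationDemo {

    public static class MyBean {
        private List<String> myList = new ArrayList<>();

        public List<String> getMyList() {
            return myList;
        }
    }

    public static void main(String[] args) throws Exception {
        MyBean viaConstructor = MyBean.class.getDeclaredConstructor().newInstance();
        MyBean viaObjenesis = new ObjenesisStd().newInstance(MyBean.class);

        System.out.println(viaConstructor.getMyList()); // []   -> initializer ran
        System.out.println(viaObjenesis.getMyList());   // null -> constructor and initializer skipped
    }
}
```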
| df17980c6dd3c6bdc3ba1d1e51aad9c69725f29c | 863d4c75856fef0b743b53370e1258ba15b18d47 | https://github.com/j-easy/easy-random/compare/df17980c6dd3c6bdc3ba1d1e51aad9c69725f29c...863d4c75856fef0b743b53370e1258ba15b18d47 | diff --git a/random-beans/src/main/java/io/github/benas/randombeans/ObjectFactory.java b/random-beans/src/main/java/io/github/benas/randombeans/ObjectFactory.java
index 28c0b2fd..546bda9f 100644
--- a/random-beans/src/main/java/io/github/benas/randombeans/ObjectFactory.java
+++ b/random-beans/src/main/java/io/github/benas/randombeans/ObjectFactory.java
@@ -50,18 +50,24 @@ class ObjectFactory {
private boolean scanClasspathForConcreteTypes;
<T> T createInstance(final Class<T> type) {
- T result;
if (scanClasspathForConcreteTypes && isAbstract(type)) {
Class<?> randomConcreteSubType = randomElementOf(getPublicConcreteSubTypesOf((type)));
if (randomConcreteSubType == null) {
throw new InstantiationError("Unable to find a matching concrete subtype of type: " + type + " in the classpath");
} else {
- result = (T) objenesis.newInstance(randomConcreteSubType);
+ return (T) createNewInstance(randomConcreteSubType);
}
} else {
- result = objenesis.newInstance(type);
+ return createNewInstance(type);
+ }
+ }
+
+ private <T> T createNewInstance(final Class<T> type) {
+ try {
+ return type.newInstance();
+ } catch (Exception exception) {
+ return objenesis.newInstance(type);
}
- return result;
}
Collection<?> createEmptyCollectionForType(Class<?> fieldType, int initialSize) {
diff --git a/random-beans/src/test/java/io/github/benas/randombeans/FieldExclusionTest.java b/random-beans/src/test/java/io/github/benas/randombeans/FieldExclusionTest.java
index a4e4465f..a0160ec7 100644
--- a/random-beans/src/test/java/io/github/benas/randombeans/FieldExclusionTest.java
+++ b/random-beans/src/test/java/io/github/benas/randombeans/FieldExclusionTest.java
@@ -40,6 +40,9 @@ import static io.github.benas.randombeans.FieldDefinitionBuilder.field;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
+import java.util.ArrayList;
+import java.util.List;
+
@RunWith(MockitoJUnitRunner.class)
public class FieldExclusionTest {
@@ -218,4 +221,28 @@ public class FieldExclusionTest {
assertThat(c.getB3()).isNotNull();
}
+ @Test
+ public void whenFieldIsExcluded_thenItsInlineInitializationShouldBeUsedAsIs() {
+ enhancedRandom = aNewEnhancedRandomBuilder()
+ .exclude(new FieldDefinition<>("myList", List.class, InlineInitializationBean.class))
+ .build();
+
+ InlineInitializationBean bean = enhancedRandom.nextObject(InlineInitializationBean.class);
+
+ assertThat(bean).isNotNull();
+ assertThat(bean.getMyList()).isEmpty();
+ }
+
+ public static class InlineInitializationBean {
+ private List<String> myList = new ArrayList<>();
+
+ public List<String> getMyList() {
+ return myList;
+ }
+
+ public void setMyList(List<String> myList) {
+ this.myList = myList;
+ }
+ }
+
} | ['random-beans/src/test/java/io/github/benas/randombeans/FieldExclusionTest.java', 'random-beans/src/main/java/io/github/benas/randombeans/ObjectFactory.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 370,684 | 81,733 | 10,412 | 132 | 509 | 96 | 14 | 1 | 1,015 | 133 | 226 | 28 | 0 | 1 | 2016-03-31T20:18:50 | 1,478 | Java | {'Java': 1051325} | MIT License |
303 | j-easy/easy-random/138/135 | j-easy | easy-random | https://github.com/j-easy/easy-random/issues/135 | https://github.com/j-easy/easy-random/pull/138 | https://github.com/j-easy/easy-random/pull/138 | 2 | fixes | Unable to exclude java.time.Instant from being randomized | Given a configuration like:
```
EnhancedRandomBuilder.aNewEnhancedRandomBuilder()
.exclude(FieldDefinitionBuilder.field().named("createdAt").ofType(Instant.class).inClass(Foo.class).get())
.build();
```
The `createdAt` field is still populated.
The reason is that `TimeRandomizerRegistry` has a higher priority than `CustomRandomizerRegistry`.
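The fix below does that re-ordering by putting `@Priority(-255)` on `TimeRandomizerRegistry`. To illustrate the mechanism (the numeric priorities and the selection code here are made up; only the `-255` annotation comes from the actual change): when several registries can supply a randomizer for a field, the highest-priority registry wins, so user-level excludes and overrides only take effect if the built-in registries sit below the custom one.
```java
import java.util.Comparator;
import java.util.List;
import java.util.Optional;

public class RegistryPriorityDemo {

    static final class Registry {
        final String name;
        final int priority;

        Registry(String name, int priority) {
            this.name = name;
            this.priority = priority;
        }
    }

    public static void main(String[] args) {
        // Hypothetical priorities for illustration only.
        List<Registry> candidates = List.of(
                new Registry("CustomRandomizerRegistry", -1),
                new Registry("TimeRandomizerRegistry", -255));

        Optional<Registry> winner = candidates.stream()
                .max(Comparator.comparingInt((Registry r) -> r.priority));

        // The custom registry wins only because the time registry sits below it.
        System.out.println(winner.map(r -> r.name).orElse("none"));
    }
}
```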
| e534c0009e55585083e3379d995faf1afb0221a5 | 193854a1215c2cecf8a8b771a411eb48bcab9a57 | https://github.com/j-easy/easy-random/compare/e534c0009e55585083e3379d995faf1afb0221a5...193854a1215c2cecf8a8b771a411eb48bcab9a57 | diff --git a/random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/TimeRandomizerRegistry.java b/random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/TimeRandomizerRegistry.java
index 81e494a6..8011492f 100644
--- a/random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/TimeRandomizerRegistry.java
+++ b/random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/TimeRandomizerRegistry.java
@@ -23,6 +23,7 @@
*/
package io.github.benas.randombeans.randomizers.registry;
+import io.github.benas.randombeans.annotation.Priority;
import io.github.benas.randombeans.api.Randomizer;
import io.github.benas.randombeans.api.RandomizerRegistry;
import io.github.benas.randombeans.randomizers.time.*;
@@ -39,6 +40,7 @@ import java.util.TimeZone;
*
* @author Mahmoud Ben Hassine ([email protected])
*/
+@Priority(-255)
public class TimeRandomizerRegistry implements RandomizerRegistry {
private final Map<Class<?>, Randomizer<?>> randomizers = new HashMap<>();
diff --git a/random-beans/src/test/java/io/github/benas/randombeans/randomizers/time/TimeSupportTest.java b/random-beans/src/test/java/io/github/benas/randombeans/randomizers/time/TimeSupportTest.java
index a4ae82dc..9c4b4694 100644
--- a/random-beans/src/test/java/io/github/benas/randombeans/randomizers/time/TimeSupportTest.java
+++ b/random-beans/src/test/java/io/github/benas/randombeans/randomizers/time/TimeSupportTest.java
@@ -25,8 +25,12 @@
package io.github.benas.randombeans.randomizers.time;
import io.github.benas.randombeans.EnhancedRandomBuilder;
+import io.github.benas.randombeans.FieldDefinitionBuilder;
import io.github.benas.randombeans.api.EnhancedRandom;
import io.github.benas.randombeans.beans.TimeBean;
+
+import java.time.Instant;
+
import org.junit.Before;
import org.junit.Test;
@@ -60,4 +64,16 @@ public class TimeSupportTest {
assertThat(timeBean.getZonedDateTime()).isNotNull();
assertThat(timeBean.getZoneOffset()).isNotNull();
}
+
+ @Test
+ @SuppressWarnings("unchecked")
+ // https://github.com/benas/random-beans/issues/135
+ public void threeTenRandomizersCanBeOverridenByCustomRandomizers() {
+ EnhancedRandom customEnhancedRandom = EnhancedRandomBuilder.aNewEnhancedRandomBuilder()
+ .exclude(FieldDefinitionBuilder.field().named("instant").ofType(Instant.class).inClass(TimeBean.class).get()).build();
+
+ TimeBean timeBean = customEnhancedRandom.nextObject(TimeBean.class);
+
+ assertThat(timeBean.getInstant()).isNull();
+ }
} | ['random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/TimeRandomizerRegistry.java', 'random-beans/src/test/java/io/github/benas/randombeans/randomizers/time/TimeSupportTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 410,288 | 90,298 | 11,648 | 141 | 73 | 15 | 2 | 1 | 359 | 24 | 78 | 12 | 0 | 1 | 2016-05-29T10:43:45 | 1,478 | Java | {'Java': 1051325} | MIT License |
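A minimal sketch of the behaviour the fix above restores (easy-random issue 135). It mirrors the test added in the PR: once `TimeRandomizerRegistry` carries a lower `@Priority` than the custom-randomizer registry, a field-level exclusion for a `java.time.Instant` field actually takes effect. The `TimeBean` class and the builder calls come from the diff; everything else is illustrative.
```java
import java.time.Instant;

import io.github.benas.randombeans.EnhancedRandomBuilder;
import io.github.benas.randombeans.FieldDefinitionBuilder;
import io.github.benas.randombeans.api.EnhancedRandom;
import io.github.benas.randombeans.beans.TimeBean;

public class ExcludeInstantSketch {
    public static void main(String[] args) {
        // register an exclusion for the "instant" field of TimeBean
        EnhancedRandom random = EnhancedRandomBuilder.aNewEnhancedRandomBuilder()
                .exclude(FieldDefinitionBuilder.field()
                        .named("instant").ofType(Instant.class).inClass(TimeBean.class).get())
                .build();

        TimeBean bean = random.nextObject(TimeBean.class);

        // before the fix the built-in time registry won and the field was populated;
        // after the fix the excluded field is left null
        System.out.println(bean.getInstant()); // null
    }
}
```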
302 | j-easy/easy-random/139/131 | j-easy | easy-random | https://github.com/j-easy/easy-random/issues/131 | https://github.com/j-easy/easy-random/pull/139 | https://github.com/j-easy/easy-random/pull/139 | 1 | fixes | InstantiationException when providing a randomizer without default constructor in @Randomizer | The `@Randomizer` annotation requires a randomizer with a default constructor. If no such constructor is defined, Random Beans throws an `InstantiationException`. Instead, the library should throw an `ObjectGenerationException` (as declared in the API) with the `InstantiationException` as the root cause.
| e534c0009e55585083e3379d995faf1afb0221a5 | 04a4f4ea4eb630b215bba34c741f82e90e11c12d | https://github.com/j-easy/easy-random/compare/e534c0009e55585083e3379d995faf1afb0221a5...04a4f4ea4eb630b215bba34c741f82e90e11c12d | diff --git a/random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/AnnotationRandomizerRegistry.java b/random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/AnnotationRandomizerRegistry.java
index e5c05fac..2ea8f6aa 100644
--- a/random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/AnnotationRandomizerRegistry.java
+++ b/random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/AnnotationRandomizerRegistry.java
@@ -31,8 +31,6 @@ import io.github.benas.randombeans.api.RandomizerRegistry;
import io.github.benas.randombeans.util.ReflectionUtils;
import java.lang.reflect.Field;
-import java.util.logging.Level;
-import java.util.logging.Logger;
/**
* A {@link RandomizerRegistry} for fields annotated with {@link io.github.benas.randombeans.annotation.Randomizer}.
@@ -42,8 +40,6 @@ import java.util.logging.Logger;
@Priority(-253)
public class AnnotationRandomizerRegistry implements RandomizerRegistry {
- private static final Logger LOGGER = Logger.getLogger(AnnotationRandomizerRegistry.class.getName());
-
/**
* Set the initial seed for all randomizers of the registry
*
@@ -61,19 +57,12 @@ public class AnnotationRandomizerRegistry implements RandomizerRegistry {
* @return the randomizer registered for the given field
*/
@Override
- @SuppressWarnings(value = "unchecked")
public Randomizer<?> getRandomizer(Field field) {
if (field.isAnnotationPresent(io.github.benas.randombeans.annotation.Randomizer.class)) {
io.github.benas.randombeans.annotation.Randomizer randomizer = field.getAnnotation(io.github.benas.randombeans.annotation.Randomizer.class);
Class<?> type = randomizer.value();
RandomizerArgument[] arguments = randomizer.args();
-
- try {
- //return (Randomizer<?>) type.newInstance();
- return ReflectionUtils.newInstance(type, arguments);
- } catch (Exception e) {
- LOGGER.log(Level.WARNING, "Unable to create an instance of " + type.getName(), e);
- }
+ return ReflectionUtils.newInstance(type, arguments);
}
return null;
}
diff --git a/random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java b/random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java
index fe2dfa10..184074d4 100644
--- a/random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java
+++ b/random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java
@@ -25,6 +25,7 @@
package io.github.benas.randombeans.util;
import io.github.benas.randombeans.annotation.RandomizerArgument;
+import io.github.benas.randombeans.api.ObjectGenerationException;
import io.github.benas.randombeans.api.Randomizer;
import lombok.experimental.UtilityClass;
@@ -267,29 +268,34 @@ public class ReflectionUtils {
return actualTypeArguments;
}
- public static <T> Randomizer<T> newInstance(Class<T> clazz, RandomizerArgument[] args) throws IllegalAccessException, InvocationTargetException, InstantiationException {
- if(args != null && args.length > 0) {
- for(Constructor c : clazz.getConstructors()) {
- if(c.getParameterCount() > 0 && c.getParameterCount() == args.length) {
- Object[] nArgs = new Object[args.length];
- Class[] argTypes = c.getParameterTypes();
- for(int x=0; x < args.length; x++) {
- Class<?> argType = argTypes[x];
- RandomizerArgument arg = args[x];
- String val = arg.value();
- Class type = arg.type();
+ @SuppressWarnings("unchecked")
+ public static <T> Randomizer<T> newInstance(Class<T> clazz, RandomizerArgument[] args) {
+ try {
+ if (args != null && args.length > 0) {
+ for (Constructor<?> c : clazz.getConstructors()) {
+ if (c.getParameterCount() > 0 && c.getParameterCount() == args.length) {
+ Object[] nArgs = new Object[args.length];
+ Class<?>[] argTypes = c.getParameterTypes();
+ for (int x = 0; x < args.length; x++) {
+ Class<?> argType = argTypes[x];
+ RandomizerArgument arg = args[x];
+ String val = arg.value();
+ Class<?> type = arg.type();
- if(argType.isAssignableFrom(arg.type())) {
- nArgs[x] = Mapper.INSTANCE.convertValue(val, type);
- } else {
- // Can't be a valid input for this constructor
- break;
+ if (argType.isAssignableFrom(arg.type())) {
+ nArgs[x] = Mapper.INSTANCE.convertValue(val, type);
+ } else {
+ // Can't be a valid input for this constructor
+ break;
+ }
}
+ return (Randomizer<T>) c.newInstance(nArgs);
}
- return (Randomizer<T>) c.newInstance(nArgs);
}
}
+ return (Randomizer<T>) clazz.newInstance();
+ } catch (IllegalAccessException | InvocationTargetException | InstantiationException e) {
+ throw new ObjectGenerationException(String.format("Could not create Randomizer with type: %s and constructor arguments: %s", clazz, Arrays.toString(args)), e);
}
- return (Randomizer<T>) clazz.newInstance();
- }
}
+}
diff --git a/random-beans/src/test/java/io/github/benas/randombeans/RandomizerAnnotationTest.java b/random-beans/src/test/java/io/github/benas/randombeans/RandomizerAnnotationTest.java
index b02564f2..7dc94289 100644
--- a/random-beans/src/test/java/io/github/benas/randombeans/RandomizerAnnotationTest.java
+++ b/random-beans/src/test/java/io/github/benas/randombeans/RandomizerAnnotationTest.java
@@ -24,9 +24,11 @@
package io.github.benas.randombeans;
-import io.github.benas.randombeans.annotation.RandomizerArgument;
import org.junit.Test;
+import io.github.benas.randombeans.api.ObjectGenerationException;
+import io.github.benas.randombeans.api.Randomizer;
+
import static io.github.benas.randombeans.EnhancedRandomBuilder.aNewEnhancedRandomBuilder;
import static org.assertj.core.api.Assertions.assertThat;
@@ -38,6 +40,28 @@ public class RandomizerAnnotationTest {
assertThat(foo.getName()).isEqualTo("foo");
}
+ @Test(expected=ObjectGenerationException.class)
+ // https://github.com/benas/random-beans/issues/131
+ public void shouldThrowObjectGenerationExceptionWhenRandomizerUsedInRandomizerAnnotationHasNoDefaultConstructor() {
+ aNewEnhancedRandomBuilder().build().nextObject(Bar.class);
+ }
+
+ private class Bar {
+ @io.github.benas.randombeans.annotation.Randomizer(RandomizerWithoutDefaultConstrcutor.class)
+ private String name;
+ }
+
+ public static class RandomizerWithoutDefaultConstrcutor implements Randomizer<String> {
+
+ public RandomizerWithoutDefaultConstrcutor(int d) {
+ }
+
+ @Override
+ public String getRandomValue() {
+ return null;
+ }
+ }
+
private class Foo {
@io.github.benas.randombeans.annotation.Randomizer(DummyRandomizer.class)
private String name; | ['random-beans/src/main/java/io/github/benas/randombeans/randomizers/registry/AnnotationRandomizerRegistry.java', 'random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java', 'random-beans/src/test/java/io/github/benas/randombeans/RandomizerAnnotationTest.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 410,288 | 90,298 | 11,648 | 141 | 3,394 | 592 | 57 | 2 | 301 | 40 | 59 | 2 | 0 | 0 | 2016-05-29T12:32:21 | 1,478 | Java | {'Java': 1051325} | MIT License |
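A self-contained sketch of the exception-translation pattern the fix above applies (easy-random issue 131): checked reflection failures are wrapped in a single unchecked, API-level exception instead of leaking an `InstantiationException`. The `ReflectiveFactory` helper, its exception type, and its message are illustrative only; the real code in `ReflectionUtils.newInstance` wraps into the library's `ObjectGenerationException`.
```java
import java.lang.reflect.InvocationTargetException;

public class ReflectiveFactory {

    // mirror of the pattern in the fix: try to instantiate, and translate any
    // reflective failure into one unchecked exception type with the original cause
    static <T> T newInstanceOrFail(Class<T> type) {
        try {
            return type.getDeclaredConstructor().newInstance();
        } catch (InstantiationException | IllegalAccessException
                | NoSuchMethodException | InvocationTargetException e) {
            throw new IllegalStateException(
                    "Could not create an instance of " + type.getName(), e);
        }
    }

    public static void main(String[] args) {
        // a type without a default constructor triggers the translated exception
        try {
            newInstanceOrFail(Integer.class);
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage()
                    + " (cause: " + e.getCause().getClass().getSimpleName() + ")");
        }
    }
}
```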
301 | j-easy/easy-random/205/204 | j-easy | easy-random | https://github.com/j-easy/easy-random/issues/204 | https://github.com/j-easy/easy-random/pull/205 | https://github.com/j-easy/easy-random/pull/205 | 1 | fixes | Instantiate Abstract Class | When I instantiate an abstract class directly, EnhancedRandom creates an object of a concrete subtype but only fills the properties declared by that subtype. If I put the same abstract class inside a container class, the container instance holds an object of a concrete subtype with all of its properties filled. I don't know whether this is a feature or a bug; if it is a feature, then only the subtype's own properties of the object inside the container class should be filled, just like when the abstract class is instantiated directly with EnhancedRandom.
Here is an example:
```
abstract public class Vehicle {
private int productionYear;
public int getProductionYear() {
return productionYear;
}
public void setProductionYear(int productionYear) {
this.productionYear = productionYear;
}
}
public class Car extends Vehicle {
private long topSpeed;
public long getTopSpeed() {
return topSpeed;
}
public void setTopSpeed(long topSpeed) {
this.topSpeed = topSpeed;
}
}
public class Parking {
private Vehicle vehicle;
public Vehicle getVehicle() {
return vehicle;
}
public void setVehicle(Vehicle vehicle) {
this.vehicle = vehicle;
}
}
@Test
public void test() {
Car car1 = dummyCreator.nextObject(Car.class);
Vehicle vehicle1 = dummyCreator.nextObject(Vehicle.class);
Parking parking = dummyCreator.nextObject(Parking.class);
Vehicle vehicle2 = parking.getVehicle();
}
```
| a84f2d08db9a15906f1335372020c911889234e0 | 9b7cc49a733e7bc544f30be7131f0c21e7c9eed3 | https://github.com/j-easy/easy-random/compare/a84f2d08db9a15906f1335372020c911889234e0...9b7cc49a733e7bc544f30be7131f0c21e7c9eed3 | diff --git a/random-beans/src/main/java/io/github/benas/randombeans/EnhancedRandomImpl.java b/random-beans/src/main/java/io/github/benas/randombeans/EnhancedRandomImpl.java
index 972a947c..793003ed 100644
--- a/random-beans/src/main/java/io/github/benas/randombeans/EnhancedRandomImpl.java
+++ b/random-beans/src/main/java/io/github/benas/randombeans/EnhancedRandomImpl.java
@@ -117,7 +117,8 @@ class EnhancedRandomImpl extends EnhancedRandom {
// retrieve declared and inherited fields
List<Field> fields = getDeclaredFields(result);
- fields.addAll(getInheritedFields(type));
+ // we can not use type here, because with classpath scanning enabled the result can be a subtype
+ fields.addAll(getInheritedFields(result.getClass()));
// populate fields with random data
populateFields(fields, result, context);
diff --git a/random-beans/src/test/java/io/github/benas/randombeans/EnhancedRandomImplTest.java b/random-beans/src/test/java/io/github/benas/randombeans/EnhancedRandomImplTest.java
index cb969295..f1c44102 100644
--- a/random-beans/src/test/java/io/github/benas/randombeans/EnhancedRandomImplTest.java
+++ b/random-beans/src/test/java/io/github/benas/randombeans/EnhancedRandomImplTest.java
@@ -227,7 +227,22 @@ public class EnhancedRandomImplTest {
}
@Test
- public void whenScanClasspathForConcreteTypesIsEnabled_thenShouldPopulateConcreteSubTypes() {
+ public void whenScanClasspathForConcreteTypesIsEnabled_thenShouldPopulateAbstractTypesWithConcreteSubTypes() {
+ // Given
+ enhancedRandom = EnhancedRandomBuilder.aNewEnhancedRandomBuilder().scanClasspathForConcreteTypes(true).build();
+
+ // When
+ Bar bar = enhancedRandom.nextObject(Bar.class);
+
+ // Then
+ assertThat(bar).isNotNull();
+ assertThat(bar).isInstanceOf(ConcreteBar.class);
+ // https://github.com/benas/random-beans/issues/204
+ assertThat(bar.getI()).isNotNull();
+ }
+
+ @Test
+ public void whenScanClasspathForConcreteTypesIsEnabled_thenShouldPopulateFieldsOfAbstractTypeWithConcreteSubTypes() {
// Given
enhancedRandom = EnhancedRandomBuilder.aNewEnhancedRandomBuilder().scanClasspathForConcreteTypes(true).build();
diff --git a/random-beans/src/test/java/io/github/benas/randombeans/beans/Bar.java b/random-beans/src/test/java/io/github/benas/randombeans/beans/Bar.java
index 3254c663..0208be21 100644
--- a/random-beans/src/test/java/io/github/benas/randombeans/beans/Bar.java
+++ b/random-beans/src/test/java/io/github/benas/randombeans/beans/Bar.java
@@ -23,7 +23,12 @@
*/
package io.github.benas.randombeans.beans;
+import lombok.Data;
+
+@Data
public abstract class Bar {
+ private Integer i;
+
public abstract String getName();
} | ['random-beans/src/test/java/io/github/benas/randombeans/EnhancedRandomImplTest.java', 'random-beans/src/main/java/io/github/benas/randombeans/EnhancedRandomImpl.java', 'random-beans/src/test/java/io/github/benas/randombeans/beans/Bar.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 507,255 | 111,478 | 13,905 | 170 | 230 | 41 | 3 | 1 | 1,494 | 190 | 304 | 52 | 0 | 1 | 2016-09-30T19:44:43 | 1,478 | Java | {'Java': 1051325} | MIT License |
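A small, self-contained sketch related to the fix above (easy-random issue 204). It is only illustrative of why the fix walks `result.getClass()` rather than the declared abstract type when collecting inherited fields: the set of fields you see depends on which class you start from, and starting from the runtime class of the concrete instance covers both the subtype's and the abstract parent's fields. `Vehicle` and `Car` echo the classes from the issue; the field-walking helper is an assumption for demonstration, not the library's code.
```java
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;

abstract class Vehicle {
    int productionYear;
}

class Car extends Vehicle {
    long topSpeed;
}

public class FieldWalkDemo {

    // collect the declared fields of a class and all of its superclasses
    static List<String> fieldNames(Class<?> start) {
        List<String> names = new ArrayList<>();
        for (Class<?> c = start; c != null && c != Object.class; c = c.getSuperclass()) {
            for (Field field : c.getDeclaredFields()) {
                names.add(field.getName());
            }
        }
        return names;
    }

    public static void main(String[] args) {
        Vehicle vehicle = new Car();

        // walking only the declared (abstract) type misses the subtype's fields
        System.out.println(fieldNames(Vehicle.class));      // [productionYear]

        // walking the runtime class sees the subtype's and the inherited fields
        System.out.println(fieldNames(vehicle.getClass())); // [topSpeed, productionYear]
    }
}
```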
300 | j-easy/easy-random/209/208 | j-easy | easy-random | https://github.com/j-easy/easy-random/issues/208 | https://github.com/j-easy/easy-random/pull/209 | https://github.com/j-easy/easy-random/pull/209 | 1 | fixes | ClassCastException on wildcard generics in collections | Hello, I'm trying to figure out the best way around this. I want to populate some JAX-B beans. They have fields like this:
```java
@XmlElementRef(name = "reportedLocation", namespace = "http://api.foo.com/v3", type = JAXBElement.class)
protected List<JAXBElement<? extends ReportedLocation>> reportedLocations;
```
I get a `ClassCastException` from this:
```
Caused by: java.lang.ClassCastException: sun.reflect.generics.reflectiveObjects.ParameterizedTypeImpl cannot be cast to java.lang.Class
at io.github.benas.randombeans.CollectionPopulator.getRandomCollection(CollectionPopulator.java:68)
at io.github.benas.randombeans.FieldPopulator.generateRandomValue(FieldPopulator.java:93)
at io.github.benas.randombeans.FieldPopulator.populateField(FieldPopulator.java:79)
at io.github.benas.randombeans.EnhancedRandomImpl.populateField(EnhancedRandomImpl.java:162)
at io.github.benas.randombeans.EnhancedRandomImpl.populateFields(EnhancedRandomImpl.java:153)
at io.github.benas.randombeans.EnhancedRandomImpl.doPopulateBean(EnhancedRandomImpl.java:124)
... 203 more
```
There are quite a few of these. I have a custom registry for some other abstract classes I have but I'm unsure of the best way around this one.
Another suggestion: it might be nice if EnhancedRandom had a brute-force mode that never stops on these kinds of errors and simply skips them (and maybe logs an error).
Great library everyone!
| 17d70a0d3c1d4915e866b4c5de23345a3ee300de | 270e39ff9c6ac91f977aaf4a9c8c0f0d3f99af2b | https://github.com/j-easy/easy-random/compare/17d70a0d3c1d4915e866b4c5de23345a3ee300de...270e39ff9c6ac91f977aaf4a9c8c0f0d3f99af2b | diff --git a/random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java b/random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java
index 4666f7b4..c2b4d263 100644
--- a/random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java
+++ b/random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java
@@ -242,7 +242,7 @@ public class ReflectionUtils {
* @return true if the type is populatable, false otherwise
*/
public static boolean isPopulatable(final Type type) {
- return !isWildcardType(type) && !isTypeVariable(type) && !isCollectionType(type);
+ return !isWildcardType(type) && !isTypeVariable(type) && !isCollectionType(type) && !isParameterizedType(type);
}
/**
diff --git a/random-beans/src/test/java/io/github/benas/randombeans/beans/WildCardCollectionBean.java b/random-beans/src/test/java/io/github/benas/randombeans/beans/WildCardCollectionBean.java
index c31730c9..806452b4 100644
--- a/random-beans/src/test/java/io/github/benas/randombeans/beans/WildCardCollectionBean.java
+++ b/random-beans/src/test/java/io/github/benas/randombeans/beans/WildCardCollectionBean.java
@@ -48,6 +48,8 @@ public class WildCardCollectionBean {
private List<?> unboundedWildCardTypedList;
private List<? extends Runnable> boundedWildCardTypedList;
+ // https://github.com/benas/random-beans/issues/208
+ private List<Comparable<? extends Object>> nestedBoundedWildCardTypedList;
private Queue<?> unboundedWildCardTypedQueue;
private Queue<? extends Runnable> boundedWildCardTypedQueue; | ['random-beans/src/main/java/io/github/benas/randombeans/util/ReflectionUtils.java', 'random-beans/src/test/java/io/github/benas/randombeans/beans/WildCardCollectionBean.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 507,377 | 111,501 | 13,906 | 170 | 211 | 52 | 2 | 1 | 1,451 | 134 | 324 | 22 | 1 | 0 | 2016-10-07T16:33:55 | 1,478 | Java | {'Java': 1051325} | MIT License |
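A self-contained sketch of the condition the fix above adds (easy-random issue 208): a collection whose element type is itself generic, such as `JAXBElement<? extends ReportedLocation>`, exposes that element type as a `ParameterizedType` rather than a `Class`, so the unchecked cast in `getRandomCollection` blew up. Treating parameterized element types as "not populatable" sidesteps the cast. The field below has the same shape as the one added to `WildCardCollectionBean` in the diff; the demo class itself is illustrative.
```java
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.List;

public class ElementTypeDemo {

    // same shape as the nestedBoundedWildCardTypedList field added by the fix
    List<Comparable<? extends Object>> nested;

    public static void main(String[] args) throws Exception {
        Field field = ElementTypeDemo.class.getDeclaredField("nested");

        // List<Comparable<? extends Object>> -> take the element type argument
        ParameterizedType listType = (ParameterizedType) field.getGenericType();
        Type elementType = listType.getActualTypeArguments()[0];

        System.out.println(elementType instanceof ParameterizedType); // true
        System.out.println(elementType instanceof Class);             // false: a Class cast would fail
    }
}
```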
299 | j-easy/easy-random/240/237 | j-easy | easy-random | https://github.com/j-easy/easy-random/issues/237 | https://github.com/j-easy/easy-random/pull/240 | https://github.com/j-easy/easy-random/pull/240 | 1 | fixes | Fundamental issue in random-beans-validation | I found a fundamental problem in the `random-beans-validation` implementation. Currently every randomizer created by the package generates the same value every time (or almost every time, see below).
Let's for example look into [`SizeAnnotationHandler`](https://github.com/benas/random-beans/blob/master/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/SizeAnnotationHandler.java).
Every time a randomizer is looked up for a field annotated with `@Size`, it creates a [new instance](https://github.com/benas/random-beans/blob/master/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/SizeAnnotationHandler.java#L56) of `CharacterRandomizer`, providing it with the **same** seed, and therefore the randomizer generates the **same** value every time. Or almost every time, because the randomizer for the string length is [not using](https://github.com/benas/random-beans/blob/master/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/SizeAnnotationHandler.java#L54) the seed (but it should). Other annotation handlers have the same issue, except those generating constant values (for obvious reasons).
The ideal solution would be to somehow cache the randomizers created for a field from its annotation. But I'm not good with Java, so I'm not sure how feasible/doable it is.
index f23d2e86..155bb7f8 100644
--- a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/DecimaMinMaxAnnotationHandler.java
+++ b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/DecimaMinMaxAnnotationHandler.java
@@ -32,13 +32,14 @@ import javax.validation.constraints.DecimalMin;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.math.BigInteger;
+import java.util.Random;
class DecimaMinMaxAnnotationHandler implements BeanValidationAnnotationHandler {
- private long seed;
+ private final Random random;
public DecimaMinMaxAnnotationHandler(long seed) {
- this.seed = seed;
+ random = new Random(seed);
}
public Randomizer<?> getRandomizer(Field field) {
@@ -62,49 +63,49 @@ class DecimaMinMaxAnnotationHandler implements BeanValidationAnnotationHandler {
return new ByteRangeRandomizer(
minValue == null ? null : minValue.byteValue(),
maxValue == null ? null : maxValue.byteValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(Short.TYPE) || fieldType.equals(Short.class)) {
return new ShortRangeRandomizer(
minValue == null ? null : minValue.shortValue(),
maxValue == null ? null : maxValue.shortValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(Integer.TYPE) || fieldType.equals(Integer.class)) {
return new IntegerRangeRandomizer(
minValue == null ? null : minValue.intValue(),
maxValue == null ? null : maxValue.intValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(Long.TYPE) || fieldType.equals(Long.class)) {
return new LongRangeRandomizer(
minValue == null ? null : minValue.longValue(),
maxValue == null ? null : maxValue.longValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(BigInteger.class)) {
return new BigIntegerRangeRandomizer(
minValue == null ? null : minValue.intValue(),
maxValue == null ? null : maxValue.intValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(BigDecimal.class)) {
return new BigDecimalRangeRandomizer(
minValue == null ? null : minValue.longValue(),
maxValue == null ? null : maxValue.longValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(String.class)) {
BigDecimalRangeRandomizer delegate = new BigDecimalRangeRandomizer(
minValue == null ? null : minValue.longValue(),
maxValue == null ? null : maxValue.longValue(),
- seed
+ random.nextLong()
);
return new StringDelegatingRandomizer(delegate);
}
diff --git a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/FutureAnnotationHandler.java b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/FutureAnnotationHandler.java
index dc1f4b19..a7a6bddc 100644
--- a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/FutureAnnotationHandler.java
+++ b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/FutureAnnotationHandler.java
@@ -30,18 +30,19 @@ import io.github.benas.randombeans.util.Constants;
import java.lang.reflect.Field;
import java.util.Calendar;
import java.util.Date;
+import java.util.Random;
class FutureAnnotationHandler implements BeanValidationAnnotationHandler {
- private long seed;
+ private final Random random;
public FutureAnnotationHandler(long seed) {
- this.seed = seed;
+ random = new Random(seed);
}
public Randomizer<?> getRandomizer(Field field) {
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.YEAR, Constants.DEFAULT_DATE_RANGE);
- return new DateRangeRandomizer(new Date(), calendar.getTime(), seed);
+ return new DateRangeRandomizer(new Date(), calendar.getTime(), random.nextLong());
}
}
diff --git a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/MinMaxAnnotationHandler.java b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/MinMaxAnnotationHandler.java
index c2792117..cbc21348 100644
--- a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/MinMaxAnnotationHandler.java
+++ b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/MinMaxAnnotationHandler.java
@@ -31,13 +31,14 @@ import javax.validation.constraints.Min;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.math.BigInteger;
+import java.util.Random;
class MinMaxAnnotationHandler implements BeanValidationAnnotationHandler {
- private long seed;
+ private final Random random;
public MinMaxAnnotationHandler(long seed) {
- this.seed = seed;
+ random = new Random(seed);
}
public Randomizer<?> getRandomizer(Field field) {
@@ -61,42 +62,42 @@ class MinMaxAnnotationHandler implements BeanValidationAnnotationHandler {
return new ByteRangeRandomizer(
minValue == null ? null : minValue.byteValue(),
maxValue == null ? null : maxValue.byteValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(Short.TYPE) || fieldType.equals(Short.class)) {
return new ShortRangeRandomizer(
minValue == null ? null : minValue.shortValue(),
maxValue == null ? null : maxValue.shortValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(Integer.TYPE) || fieldType.equals(Integer.class)) {
return new IntegerRangeRandomizer(
minValue == null ? null : minValue.intValue(),
maxValue == null ? null : maxValue.intValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(Long.TYPE) || fieldType.equals(Long.class)) {
return new LongRangeRandomizer(
minValue == null ? null : minValue,
maxValue == null ? null : maxValue,
- seed
+ random.nextLong()
);
}
if (fieldType.equals(BigInteger.class)) {
return new BigIntegerRangeRandomizer(
minValue == null ? null : minValue.intValue(),
maxValue == null ? null : maxValue.intValue(),
- seed
+ random.nextLong()
);
}
if (fieldType.equals(BigDecimal.class)) {
return new BigDecimalRangeRandomizer(
minValue == null ? null : minValue,
maxValue == null ? null : maxValue,
- seed
+ random.nextLong()
);
}
}
diff --git a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PastAnnotationHandler.java b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PastAnnotationHandler.java
index 16255216..b488272a 100644
--- a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PastAnnotationHandler.java
+++ b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PastAnnotationHandler.java
@@ -30,18 +30,19 @@ import io.github.benas.randombeans.util.Constants;
import java.lang.reflect.Field;
import java.util.Calendar;
import java.util.Date;
+import java.util.Random;
class PastAnnotationHandler implements BeanValidationAnnotationHandler {
- private long seed;
+ private final Random random;
public PastAnnotationHandler(long seed) {
- this.seed = seed;
+ random = new Random(seed);
}
public Randomizer<?> getRandomizer(Field field) {
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.YEAR, -Constants.DEFAULT_DATE_RANGE);
- return new DateRangeRandomizer(calendar.getTime(), new Date(), seed);
+ return new DateRangeRandomizer(calendar.getTime(), new Date(), random.nextLong());
}
}
diff --git a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PatternAnnotationHandler.java b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PatternAnnotationHandler.java
index 4ae738e8..70810868 100644
--- a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PatternAnnotationHandler.java
+++ b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PatternAnnotationHandler.java
@@ -28,13 +28,14 @@ import io.github.benas.randombeans.randomizers.RegularExpressionRandomizer;
import javax.validation.constraints.Pattern;
import java.lang.reflect.Field;
+import java.util.Random;
class PatternAnnotationHandler implements BeanValidationAnnotationHandler {
- private long seed;
+ private final Random random;
public PatternAnnotationHandler(long seed) {
- this.seed = seed;
+ random = new Random(seed);
}
public Randomizer<?> getRandomizer(Field field) {
@@ -43,7 +44,7 @@ class PatternAnnotationHandler implements BeanValidationAnnotationHandler {
final String regex = patternAnnotation.regexp();
if (fieldType.equals(String.class)) {
- return new RegularExpressionRandomizer(regex, seed);
+ return new RegularExpressionRandomizer(regex, random.nextLong());
}
return null;
}
diff --git a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/SizeAnnotationHandler.java b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/SizeAnnotationHandler.java
index d963ea6b..009bd5c1 100644
--- a/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/SizeAnnotationHandler.java
+++ b/random-beans-validation/src/main/java/io/github/benas/randombeans/validation/SizeAnnotationHandler.java
@@ -24,23 +24,21 @@
package io.github.benas.randombeans.validation;
import io.github.benas.randombeans.api.Randomizer;
-import io.github.benas.randombeans.randomizers.text.CharacterRandomizer;
+import io.github.benas.randombeans.randomizers.text.StringRandomizer;
import javax.validation.constraints.Size;
import java.lang.reflect.Field;
import java.nio.charset.Charset;
-
-import static io.github.benas.randombeans.randomizers.range.IntegerRangeRandomizer.aNewIntegerRangeRandomizer;
-import static io.github.benas.randombeans.randomizers.text.CharacterRandomizer.aNewCharacterRandomizer;
+import java.util.Random;
class SizeAnnotationHandler implements BeanValidationAnnotationHandler {
- private long seed;
+ private final Random random;
private Charset charset;
public SizeAnnotationHandler(long seed, Charset charset) {
- this.seed = seed;
+ random = new Random(seed);
this.charset = charset;
}
@@ -51,19 +49,7 @@ class SizeAnnotationHandler implements BeanValidationAnnotationHandler {
final int min = sizeAnnotation.min();
final int max = sizeAnnotation.max();
if (fieldType.equals(String.class)) {
- final int randomLength = aNewIntegerRangeRandomizer(min, max).getRandomValue();
- return new Randomizer<String>() {
- private final CharacterRandomizer characterRandomizer = aNewCharacterRandomizer(charset, seed);
-
- @Override
- public String getRandomValue() {
- StringBuilder stringBuilder = new StringBuilder();
- for (int i = 0; i < randomLength; i++) {
- stringBuilder.append(characterRandomizer.getRandomValue());
- }
- return stringBuilder.toString();
- }
- };
+ return new StringRandomizer(charset, min, max, random.nextLong());
}
return null;
}
diff --git a/random-beans-validation/src/test/java/io/github/benas/randombeans/validation/BeanValidationTest.java b/random-beans-validation/src/test/java/io/github/benas/randombeans/validation/BeanValidationTest.java
index 934faaf9..2d4534b4 100644
--- a/random-beans-validation/src/test/java/io/github/benas/randombeans/validation/BeanValidationTest.java
+++ b/random-beans-validation/src/test/java/io/github/benas/randombeans/validation/BeanValidationTest.java
@@ -23,19 +23,22 @@
*/
package io.github.benas.randombeans.validation;
-import io.github.benas.randombeans.api.EnhancedRandom;
-import org.junit.Before;
-import org.junit.Test;
+import static io.github.benas.randombeans.EnhancedRandomBuilder.aNewEnhancedRandom;
+import static io.github.benas.randombeans.EnhancedRandomBuilder.aNewEnhancedRandomBuilder;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.math.BigDecimal;
+import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
-import java.math.BigDecimal;
-import java.util.Set;
-import static io.github.benas.randombeans.EnhancedRandomBuilder.aNewEnhancedRandom;
-import static org.assertj.core.api.Assertions.assertThat;
+import org.junit.Before;
+import org.junit.Test;
+
+import io.github.benas.randombeans.api.EnhancedRandom;
public class BeanValidationTest {
@@ -68,9 +71,9 @@ public class BeanValidationTest {
assertThat(bean.getMinQuantity()).isGreaterThanOrEqualTo(5);// @Min(5) int minQuantity;
- assertThat(bean.getMaxDiscount()).isLessThanOrEqualTo(new BigDecimal("30.00"));// @DecimalMax("30.00") BigDecimal maxDiscount;;
+ assertThat(bean.getMaxDiscount()).isLessThanOrEqualTo(new BigDecimal("30.00"));// @DecimalMax("30.00") BigDecimal maxDiscount;
- assertThat(bean.getMinDiscount()).isGreaterThanOrEqualTo(new BigDecimal("5.00"));// @DecimalMin("5.00") BigDecimal minDiscount;;
+ assertThat(bean.getMinDiscount()).isGreaterThanOrEqualTo(new BigDecimal("5.00"));// @DecimalMin("5.00") BigDecimal minDiscount;
assertThat(bean.getMinQuantity()).isGreaterThanOrEqualTo(5);// @Min(5) int minQuantity;
@@ -79,6 +82,26 @@ public class BeanValidationTest {
assertThat(bean.getRegexString()).matches("[a-z]{4}");
}
+ @Test
+ public void shouldGenerateTheSameValueForTheSameSeed() {
+ EnhancedRandom random = aNewEnhancedRandomBuilder().seed(123L).build();
+
+ BeanValidationAnnotatedBean bean = random.nextObject(BeanValidationAnnotatedBean.class);
+
+ assertThat(bean.getUsername()).isEqualTo("eOMtThyhVNLWUZNRcBaQKxIy");
+ // uses DateRange with now as end, so test is not repeatable
+ // assertThat(bean.getBirthday()).isEqualTo("2007-07-22T13:20:35.628");
+ // uses DateRange with now as start, so test is not repeatable
+ // assertThat(bean.getEventDate()).isEqualTo("2017-07-22T13:20:35.628");
+ assertThat(bean.getMaxQuantity()).isEqualTo(-2055951746);
+ assertThat(bean.getMinQuantity()).isEqualTo(91531906);
+ assertThat(bean.getMaxDiscount()).isEqualTo(new BigDecimal(30));
+ assertThat(bean.getMinDiscount()).isEqualTo(new BigDecimal(393126525614007301L));
+ assertThat(bean.getMinQuantity()).isEqualTo(91531906);
+ assertThat(bean.getBriefMessage()).isEqualTo("tguu");
+ assertThat(bean.getRegexString()).isEqualTo("vuna");
+ }
+
@Test
public void generatedBeanShouldBeValidUsingBeanValidationAPI() {
BeanValidationAnnotatedBean bean = enhancedRandom.nextObject(BeanValidationAnnotatedBean.class); | ['random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PastAnnotationHandler.java', 'random-beans-validation/src/main/java/io/github/benas/randombeans/validation/MinMaxAnnotationHandler.java', 'random-beans-validation/src/main/java/io/github/benas/randombeans/validation/DecimaMinMaxAnnotationHandler.java', 'random-beans-validation/src/main/java/io/github/benas/randombeans/validation/SizeAnnotationHandler.java', 'random-beans-validation/src/test/java/io/github/benas/randombeans/validation/BeanValidationTest.java', 'random-beans-validation/src/main/java/io/github/benas/randombeans/validation/FutureAnnotationHandler.java', 'random-beans-validation/src/main/java/io/github/benas/randombeans/validation/PatternAnnotationHandler.java'] | {'.java': 7} | 7 | 7 | 0 | 0 | 7 | 517,748 | 113,439 | 14,137 | 170 | 3,419 | 556 | 81 | 6 | 1,313 | 121 | 271 | 7 | 3 | 0 | 2017-02-15T21:12:27 | 1,478 | Java | {'Java': 1051325} | MIT License |
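A minimal sketch of the seeding strategy the fix above introduces (random-beans issue 237): instead of passing the one configured seed to every randomizer built from an annotation, which made them all emit the same value, each handler keeps a single seeded `java.util.Random` and hands every new randomizer a seed drawn from it. Values then differ between fields while the whole run stays reproducible for a given configured seed. The demo below is plain JDK and only illustrates the idea.
```java
import java.util.Random;

public class SeedDerivationDemo {
    public static void main(String[] args) {
        long configuredSeed = 123L;

        // one seed source per handler, as in the fixed annotation handlers
        Random seedSource = new Random(configuredSeed);
        long seedForFirstRandomizer = seedSource.nextLong();
        long seedForSecondRandomizer = seedSource.nextLong();

        // different randomizers now get different (but deterministic) seeds
        System.out.println(seedForFirstRandomizer != seedForSecondRandomizer);               // true
        System.out.println(new Random(configuredSeed).nextLong() == seedForFirstRandomizer); // true
    }
}
```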
9,996 | apache/eventmesh/2850/2849 | apache | eventmesh | https://github.com/apache/eventmesh/issues/2849 | https://github.com/apache/eventmesh/pull/2850 | https://github.com/apache/eventmesh/pull/2850 | 1 | fixes | [Bug] org.apache.eventmesh.runtime.util.WebhookUtilTest > testObtainDeliveryAgreement FAILED | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Mac
### EventMesh version
master
### What happened
```
> Task :eventmesh-runtime:test FAILED
org.apache.eventmesh.runtime.util.WebhookUtilTest > testObtainDeliveryAgreement FAILED
java.lang.AssertionError at WebhookUtilTest.java:60
> Task :eventmesh-runtime:spotbugsMain
SpotBugs ended with exit code 1
FAILURE: Build failed with an exception.
```
### How to reproduce
./gradlew :eventmesh-runtime:test
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | decf21bffbbe36663228880a312e82f3392a6d00 | 3bda8fbe10e0710c2ab8ae720cfa6cbaebeac249 | https://github.com/apache/eventmesh/compare/decf21bffbbe36663228880a312e82f3392a6d00...3bda8fbe10e0710c2ab8ae720cfa6cbaebeac249 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java
index aa885b2a3..2cc760b83 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java
@@ -32,17 +32,15 @@ import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import lombok.extern.slf4j.Slf4j;
/**
* Utility class for implementing CloudEvents Http Webhook spec
*
* @see <a href="https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/http-webhook.md">CloudEvents Http Webhook</a>
*/
+@Slf4j
public class WebhookUtil {
- private static final Logger LOGGER = LoggerFactory.getLogger(WebhookUtil.class);
-
private static final String CONTENT_TYPE_HEADER = "Content-Type";
private static final String REQUEST_ORIGIN_HEADER = "WebHook-Request-Origin";
private static final String ALLOWED_ORIGIN_HEADER = "WebHook-Allowed-Origin";
@@ -53,8 +51,8 @@ public class WebhookUtil {
final String targetUrl,
final String requestOrigin) {
- if (LOGGER.isInfoEnabled()) {
- LOGGER.info("obtain webhook delivery agreement for url: {}", targetUrl);
+ if (log.isInfoEnabled()) {
+ log.info("obtain webhook delivery agreement for url: {}", targetUrl);
}
final HttpOptions builder = new HttpOptions(targetUrl);
@@ -65,8 +63,10 @@ public class WebhookUtil {
return StringUtils.isEmpty(allowedOrigin)
|| "*".equals(allowedOrigin) || allowedOrigin.equalsIgnoreCase(requestOrigin);
} catch (Exception e) {
- LOGGER.error("HTTP Options Method is not supported at the Delivery Target: {}, "
- + "unable to obtain the webhook delivery agreement.", targetUrl);
+ if (log.isErrorEnabled()) {
+ log.error("HTTP Options Method is not supported at the Delivery Target: {}, "
+ + "unable to obtain the webhook delivery agreement.", targetUrl);
+ }
}
return true;
}
diff --git a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/WebhookUtilTest.java b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/WebhookUtilTest.java
index a84b5c049..9acd032f8 100644
--- a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/WebhookUtilTest.java
+++ b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/WebhookUtilTest.java
@@ -25,13 +25,11 @@ import static org.mockito.Mockito.mock;
import org.apache.eventmesh.api.auth.AuthService;
import org.apache.eventmesh.spi.EventMeshExtensionFactory;
-import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.message.BasicHeader;
-import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
@@ -45,21 +43,23 @@ public class WebhookUtilTest {
@Test
public void testObtainDeliveryAgreement() {
// normal case
- try (CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class);
- CloseableHttpResponse response = Mockito.mock(CloseableHttpResponse.class);
- CloseableHttpClient httpClient2 = Mockito.mock(CloseableHttpClient.class)) {
+ try (CloseableHttpClient httpClient = mock(CloseableHttpClient.class);
+ CloseableHttpResponse response = mock(CloseableHttpResponse.class);
+ CloseableHttpClient httpClient2 = mock(CloseableHttpClient.class)) {
- Mockito.when(response.getLastHeader("WebHook-Allowed-Origin")).thenReturn(new BasicHeader("WebHook-Allowed-Origin", "*"));
+ Mockito.when(response.getLastHeader("WebHook-Allowed-Origin"))
+ .thenReturn(new BasicHeader("WebHook-Allowed-Origin", "*"));
Mockito.when(httpClient.execute(any())).thenReturn(response);
- Assert.assertTrue(WebhookUtil.obtainDeliveryAgreement(httpClient, "https://eventmesh.apache.org", "*"));
+ Assert.assertTrue("match logic must return true",
+ WebhookUtil.obtainDeliveryAgreement(httpClient, "https://eventmesh.apache.org", "*"));
// abnormal case
Mockito.when(httpClient2.execute(any())).thenThrow(new RuntimeException());
try {
- WebhookUtil.obtainDeliveryAgreement(httpClient2, "xxx", "*");
- Assert.fail("invalid url should throw RuntimeException!");
+ Assert.assertTrue("when throw exception ,default return true",
+ WebhookUtil.obtainDeliveryAgreement(httpClient2, "xxx", "*"));
} catch (RuntimeException e) {
- Assert.assertNotNull(e);
+ Assert.fail(e.getMessage());
}
} catch (Exception e) {
@@ -69,20 +69,20 @@ public class WebhookUtilTest {
@Test
public void testSetWebhookHeaders() {
- String authType = "auth-http-basic";
- AuthService authService = mock(AuthService.class);
+ final String authType = "auth-http-basic";
+ final AuthService authService = mock(AuthService.class);
doNothing().when(authService).init();
- Map<String, String> authParams = new HashMap<>();
- String key = "Authorization";
- String value = "Basic ****";
+ final Map<String, String> authParams = new HashMap<>();
+ final String key = "Authorization";
+ final String value = "Basic ****";
authParams.put(key, value);
Mockito.when(authService.getAuthParams()).thenReturn(authParams);
try (MockedStatic<EventMeshExtensionFactory> dummyStatic = Mockito.mockStatic(EventMeshExtensionFactory.class)) {
dummyStatic.when(() -> EventMeshExtensionFactory.getExtension(AuthService.class, authType)).thenReturn(authService);
- HttpPost post = new HttpPost();
+ final HttpPost post = new HttpPost();
WebhookUtil.setWebhookHeaders(post, "application/json", "eventmesh.FT", authType);
- Assert.assertEquals(post.getLastHeader(key).getValue(), value);
+ Assert.assertEquals("match expect value", post.getLastHeader(key).getValue(), value);
}
}
}
\\ No newline at end of file | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java', 'eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/WebhookUtilTest.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,453,721 | 669,929 | 89,961 | 671 | 852 | 165 | 16 | 1 | 690 | 81 | 168 | 36 | 1 | 0 | 2023-01-07T02:27:29 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
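A hedged sketch of the behaviour the corrected test above asserts (EventMesh issue 2849), assuming Mockito and Apache HttpClient are on the classpath: when the OPTIONS pre-flight to the delivery target throws, `obtainDeliveryAgreement` logs the failure and falls back to "agreement granted" (returns `true`) rather than propagating the exception, which is why the old test, which expected a rethrow, failed.
```java
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.eventmesh.runtime.util.WebhookUtil;
import org.apache.http.impl.client.CloseableHttpClient;

public class WebhookAgreementSketch {
    public static void main(String[] args) throws Exception {
        // a client whose OPTIONS call always fails, as in the abnormal case of the test
        CloseableHttpClient failingClient = mock(CloseableHttpClient.class);
        when(failingClient.execute(any())).thenThrow(new RuntimeException("OPTIONS not supported"));

        boolean agreed = WebhookUtil.obtainDeliveryAgreement(failingClient, "http://example.invalid", "*");

        System.out.println(agreed); // true: a failed negotiation does not block delivery
    }
}
```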
9,987 | apache/eventmesh/3459/3458 | apache | eventmesh | https://github.com/apache/eventmesh/issues/3458 | https://github.com/apache/eventmesh/pull/3459 | https://github.com/apache/eventmesh/pull/3459 | 1 | fixes | [Bug] EventMeshHttpProducer publish Read timed out | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
master
### What happened
When I use EventMeshHttpProducer to publish a message, it throws java.net.SocketTimeoutException: Read timed out.
```java
public class HTTP {
public static void main(String[] args) throws Exception {
EventMeshHttpClientConfig eventMeshClientConfig = EventMeshHttpClientConfig.builder()
.liteEventMeshAddr("192.168.1.4:10105")
.producerGroup("TEST_PRODUCER_GROUP")
.env("DEV")
.idc("idc")
.ip(IPUtils.getLocalAddress())
.sys("1234")
.pid(String.valueOf(ThreadUtils.getPID()))
.userName("eventmesh")
.password("password")
.build();
EventMeshHttpProducer eventMeshHttpProducer = new EventMeshHttpProducer(eventMeshClientConfig);
Map<String, String> content = new HashMap<>();
content.put("content", "testAsyncMessage----------------------");
CloudEvent event = CloudEventBuilder.v1()
.withId(UUID.randomUUID().toString())
.withSubject("eventmesh-async-topic")
.withSource(URI.create("/"))
.withDataContentType("application/cloudevents+json")
.withType(EventMeshCommon.CLOUD_EVENTS_PROTOCOL_NAME)
.withData(JsonUtils.toJSONString(content).getBytes(StandardCharsets.UTF_8))
.withExtension(Constants.EVENTMESH_MESSAGE_CONST_TTL, String.valueOf(4 * 1000))
.build();
for(int i = 0; i < 10; ++i){
eventMeshHttpProducer.publish(event);
}
}
}
```
### How to reproduce
```java
public class HTTP {
public static void main(String[] args) throws Exception {
EventMeshHttpClientConfig eventMeshClientConfig = EventMeshHttpClientConfig.builder()
.liteEventMeshAddr("192.168.1.4:10105")
.producerGroup("TEST_PRODUCER_GROUP")
.env("DEV")
.idc("idc")
.ip(IPUtils.getLocalAddress())
.sys("1234")
.pid(String.valueOf(ThreadUtils.getPID()))
.userName("eventmesh")
.password("password")
.build();
EventMeshHttpProducer eventMeshHttpProducer = new EventMeshHttpProducer(eventMeshClientConfig);
Map<String, String> content = new HashMap<>();
content.put("content", "testAsyncMessage----------------------");
CloudEvent event = CloudEventBuilder.v1()
.withId(UUID.randomUUID().toString())
.withSubject("eventmesh-async-topic")
.withSource(URI.create("/"))
.withDataContentType("application/cloudevents+json")
.withType(EventMeshCommon.CLOUD_EVENTS_PROTOCOL_NAME)
.withData(JsonUtils.toJSONString(content).getBytes(StandardCharsets.UTF_8))
.withExtension(Constants.EVENTMESH_MESSAGE_CONST_TTL, String.valueOf(4 * 1000))
.build();
for(int i = 0; i < 10; ++i){
eventMeshHttpProducer.publish(event);
}
}
}
```
### Debug logs
client exception
```
2023-03-16 22:59:54,851 DEBUG [main] HttpUtils(HttpUtils.java:112) - POST http://192.168.1.4:10105 HTTP/1.1
Exception in thread "main" org.apache.eventmesh.common.exception.EventMeshException: Publish message error, target:http://192.168.1.4:10105
at org.apache.eventmesh.client.http.AbstractProducerHttpClient.publish(AbstractProducerHttpClient.java:56)
at org.apache.eventmesh.client.http.producer.EventMeshHttpProducer.publish(EventMeshHttpProducer.java:47)
at org.example.objectsize.HTTP.main(HTTP.java:48)
Caused by: java.net.SocketTimeoutException: Read timed out
at java.base/java.net.SocketInputStream.socketRead0(Native Method)
at java.base/java.net.SocketInputStream.socketRead(SocketInputStream.java:115)
at java.base/java.net.SocketInputStream.read(SocketInputStream.java:168)
at java.base/java.net.SocketInputStream.read(SocketInputStream.java:140)
at org.apache.http.impl.io.SessionInputBufferImpl.streamRead(SessionInputBufferImpl.java:137)
at org.apache.http.impl.io.SessionInputBufferImpl.fillBuffer(SessionInputBufferImpl.java:153)
at org.apache.http.impl.io.SessionInputBufferImpl.readLine(SessionInputBufferImpl.java:280)
at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:138)
at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:56)
at org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:259)
at org.apache.http.impl.DefaultBHttpClientConnection.receiveResponseHeader(DefaultBHttpClientConnection.java:163)
at org.apache.http.impl.conn.CPoolProxy.receiveResponseHeader(CPoolProxy.java:157)
at org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:273)
at org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:125)
at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:272)
at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:186)
at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89)
at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110)
at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:72)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:221)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:165)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:140)
at org.apache.eventmesh.client.http.util.HttpUtils.post(HttpUtils.java:115)
at org.apache.eventmesh.client.http.util.HttpUtils.post(HttpUtils.java:65)
at org.apache.eventmesh.client.http.util.HttpUtils.post(HttpUtils.java:57)
at org.apache.eventmesh.client.http.AbstractProducerHttpClient.publish(AbstractProducerHttpClient.java:50)
... 2 more
2023-03-16 23:00:09,885 pool-1-thread-1 DEBUG Stopping LoggerContext[name=2437c6dc, org.apache.logging.log4j.core.LoggerContext@a202ccb]
2023-03-16 23:00:09,886 pool-1-thread-1 DEBUG Stopping LoggerContext[name=2437c6dc, org.apache.logging.log4j.core.LoggerContext@a202ccb]...
2023-03-16 23:00:09,888 pool-1-thread-1 DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false
2023-03-16 23:00:09,888 pool-1-thread-1 DEBUG OutputStream closed
2023-03-16 23:00:09,888 pool-1-thread-1 DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false, all resources released: true
2023-03-16 23:00:09,889 pool-1-thread-1 DEBUG Appender console stopped with status true
2023-03-16 23:00:09,889 pool-1-thread-1 DEBUG Log4j2 ConfigurationScheduler shutting down threads in java.util.concurrent.ScheduledThreadPoolExecutor@1e78a3f9[Running, pool size = 1, active threads = 0, queued tasks = 1, completed tasks = 0]
2023-03-16 23:00:09,889 pool-1-thread-1 DEBUG Stopped XmlConfiguration[location=jar:file:/E:/develop/mavenRepository/org/apache/eventmesh/eventmesh-sdk-java/1.8.1-SNAPSHOT/eventmesh-sdk-java-1.8.1-SNAPSHOT.jar!/log4j2.xml] OK
2023-03-16 23:00:09,889 pool-1-thread-1 DEBUG Stopped LoggerContext[name=2437c6dc, org.apache.logging.log4j.core.LoggerContext@a202ccb] with status true
```
runtime exception
```
2023-03-16 22:59:54,875 DEBUG [eventMesh-http-worker-2] AbstractHTTPServer(AbstractHTTPServer.java:440) - httpCommand={REQ,POST/HTTP,requestCode=104,opaque=3,header=sendMessageRequestHeader={code=104,language=JAVA,version=V1,env=DEV,idc=idc,sys=1234,pid=10988,ip=192.168.1.4:64535,username=eventmesh,passwd=password},body=sendMessageRequestBody={topic=null,bizSeqNo=null,uniqueId=null,content={"specversion":"1.0","id":"f3801022-0431-407a-bb2f-a9e1fa230cba","source":"/","type":"cloudevents","datacontenttype":"application/cloudevents+json","subject":"eventmesh-async-topic","protocolversion":"1.0","ip":"192.168.43.1","idc":"idc","protocoldesc":"V1","bizseqno":"927518096792131111260710628736","pid":"10988","language":"JAVA","env":"DEV","sys":"1234","ttl":"4000","uniqueid":"807133169894333271230374891000","data_base64":"eyJjb250ZW50IjoidGVzdEFzeW5jTWVzc2FnZS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0ifQ=="},ttl=null,tag=,producerGroup=TEST_PRODUCER_GROUP,extFields=null}}
2023-03-16 22:59:55,038 INFO [eventMesh-sendMsg-1] cmd(SendAsyncMessageProcessor.java:89) - cmd=MSG_SEND_ASYNC|http|client2eventMesh|from=192.168.1.4:64535|to=192.168.43.1
2023-03-16 22:59:55,038 INFO [eventMesh-sendMsg-1] EventMeshExtensionFactory(EventMeshExtensionFactory.java:92) - initialize extension instance success, extensionType: interface org.apache.eventmesh.protocol.api.ProtocolAdaptor, extensionInstanceName: cloudevents
2023-03-16 22:59:55,044 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:214) - grpc client info check
2023-03-16 22:59:55,044 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:223) - total number of ConsumerGroupClients: 0
2023-03-16 22:59:55,104 ERROR [eventMesh-sendMsg-1] AbstractHTTPServer(AbstractHTTPServer.java:563) - process error
java.lang.NullPointerException: null
at java.util.Hashtable.put(Hashtable.java:460) ~[?:1.8.0_202]
at org.apache.eventmesh.runtime.core.protocol.http.producer.EventMeshProducer.init(EventMeshProducer.java:89) ~[classes/:?]
at org.apache.eventmesh.runtime.core.protocol.http.producer.ProducerManager.createEventMeshProducer(ProducerManager.java:94) ~[classes/:?]
at org.apache.eventmesh.runtime.core.protocol.http.producer.ProducerManager.getEventMeshProducer(ProducerManager.java:53) ~[classes/:?]
at org.apache.eventmesh.runtime.core.protocol.http.processor.SendAsyncMessageProcessor.processRequest(SendAsyncMessageProcessor.java:212) ~[classes/:?]
at org.apache.eventmesh.runtime.boot.AbstractHTTPServer$HTTPHandler.lambda$processEventMeshRequest$1(AbstractHTTPServer.java:548) ~[classes/:?]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [?:1.8.0_202]
at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266) [?:1.8.0_202]
at java.util.concurrent.FutureTask.run(FutureTask.java) [?:1.8.0_202]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_202]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_202]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_202]
2023-03-16 22:59:55,653 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 22:59:56,668 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 22:59:57,672 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 22:59:58,682 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 22:59:59,683 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 23:00:00,698 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 23:00:01,709 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 23:00:02,711 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 23:00:03,720 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
2023-03-16 23:00:04,734 DEBUG [StandaloneConsumerThread-1] SubScribeTask(SubScribeTask.java:57) - execute subscribe task, topic: eventmesh-async-topic, offset: null
```
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 13f0a32e47b4ef888ae508349b1f55165f5ff582 | abd1cfec7e202092a7a54976828be41f35286796 | https://github.com/apache/eventmesh/compare/13f0a32e47b4ef888ae508349b1f55165f5ff582...abd1cfec7e202092a7a54976828be41f35286796 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/EventMeshProducer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/EventMeshProducer.java
index 7e4c47d1a..f58f20ea6 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/EventMeshProducer.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/EventMeshProducer.java
@@ -25,6 +25,8 @@ import org.apache.eventmesh.runtime.core.consumergroup.ProducerGroupConf;
import org.apache.eventmesh.runtime.core.plugin.MQProducerWrapper;
import org.apache.eventmesh.runtime.util.EventMeshUtil;
+import org.apache.commons.lang3.StringUtils;
+
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -86,7 +88,9 @@ public class EventMeshProducer {
keyValue.put("producerGroup", producerGroupConfig.getGroupName());
keyValue.put("instanceName", EventMeshUtil.buildMeshClientID(producerGroupConfig.getGroupName(),
eventMeshHttpConfiguration.getEventMeshCluster()));
- keyValue.put(Constants.PRODUCER_TOKEN, producerGroupConfig.getToken());
+ if (StringUtils.isNotBlank(producerGroupConfig.getToken())) {
+ keyValue.put(Constants.PRODUCER_TOKEN, producerGroupConfig.getToken());
+ }
//TODO for defibus
keyValue.put("eventMeshIDC", eventMeshHttpConfiguration.getEventMeshIDC()); | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/EventMeshProducer.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,785,651 | 741,732 | 101,181 | 811 | 295 | 52 | 6 | 1 | 12,399 | 613 | 3,215 | 172 | 3 | 3 | 2023-03-16T15:22:30 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
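A small, self-contained sketch of the root cause behind the client-side "Read timed out" (EventMesh issue 3458): the producer's key/value store is a `Properties` (a `Hashtable`), and `Hashtable.put` rejects `null` values, so an unset producer-group token threw the `NullPointerException` seen in the runtime log and the HTTP request never received a response. Guarding the put, as the fix does with `StringUtils.isNotBlank`, avoids it; the plain-JDK guard below stands in for that call.
```java
import java.util.Properties;

public class NullTokenDemo {
    public static void main(String[] args) {
        Properties keyValue = new Properties();
        String token = null; // no token configured for the producer group

        try {
            keyValue.put("token", token);
        } catch (NullPointerException expected) {
            // Hashtable-backed Properties refuse null values
            System.out.println("put(null) rejected: " + expected);
        }

        // equivalent of the StringUtils.isNotBlank guard added by the fix
        if (token != null && !token.trim().isEmpty()) {
            keyValue.put("token", token);
        }
        System.out.println("token stored: " + keyValue.containsKey("token")); // false
    }
}
```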
9,997 | apache/eventmesh/2837/2836 | apache | eventmesh | https://github.com/apache/eventmesh/issues/2836 | https://github.com/apache/eventmesh/pull/2837 | https://github.com/apache/eventmesh/pull/2837 | 1 | fixes | [Bug] Http demo connection refused | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
master
### What happened
The HTTP test fails with a connection refused error.
### How to reproduce
Run the http demo according to the documentation.
### Debug logs
```
org.apache.http.conn.HttpHostConnectException: Connect to 10.59.1.2:8088 [/10.59.1.2] failed: Connection refused: connect
at org.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:156) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:376) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:393) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:236) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:186) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:108) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.eventmesh.runtime.util.WebhookUtil.obtainDeliveryAgreement(WebhookUtil.java:64) ~[eventmesh-runtime-1.7.0-release.jar:1.7.0-release]
at org.apache.eventmesh.runtime.core.protocol.http.processor.SubscribeProcessor.processRequest(SubscribeProcessor.java:180) ~[eventmesh-runtime-1.7.0-release.jar:1.7.0-release]
at org.apache.eventmesh.runtime.boot.AbstractHTTPServer$HTTPHandler.lambda$processEventMeshRequest$1(AbstractHTTPServer.java:532) ~[eventmesh-runtime-1.7.0-release.jar:1.7.0-release]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [?:1.8.0_181]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_181]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_181]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_181]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_181]
Caused by: java.net.ConnectException: Connection refused: connect
at java.net.DualStackPlainSocketImpl.connect0(Native Method) ~[?:1.8.0_181]
at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:79) ~[?:1.8.0_181]
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_181]
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_181]
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_181]
at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) ~[?:1.8.0_181]
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_181]
at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_181]
at org.apache.http.conn.socket.PlainConnectionSocketFactory.connectSocket(PlainConnectionSocketFactory.java:75) ~[httpclient-4.5.13.jar:4.5.13]
at org.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:142) ~[httpclient-4.5.13.jar:4.5.13]
... 17 more
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR! | 03e088e189ca85f98f88e9efbf30c34d89744b01 | ee73d5d8d4fee6534be79f49bfdcdbf43a2ae49c | https://github.com/apache/eventmesh/compare/03e088e189ca85f98f88e9efbf30c34d89744b01...ee73d5d8d4fee6534be79f49bfdcdbf43a2ae49c | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java
index f7a2e7da5..aa885b2a3 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java
@@ -27,7 +27,6 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.message.BasicHeader;
-import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
@@ -65,9 +64,9 @@ public class WebhookUtil {
final String allowedOrigin = response.getLastHeader(ALLOWED_ORIGIN_HEADER).getValue();
return StringUtils.isEmpty(allowedOrigin)
|| "*".equals(allowedOrigin) || allowedOrigin.equalsIgnoreCase(requestOrigin);
- } catch (IOException e) {
+ } catch (Exception e) {
LOGGER.error("HTTP Options Method is not supported at the Delivery Target: {}, "
- + "unable to obtain the webhook delivery agreement.", targetUrl, e);
+ + "unable to obtain the webhook delivery agreement.", targetUrl);
}
return true;
} | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/util/WebhookUtil.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,452,096 | 669,686 | 89,915 | 671 | 265 | 51 | 5 | 1 | 3,822 | 173 | 1,078 | 58 | 1 | 0 | 2023-01-06T08:45:00 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
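The fix above widens the catch around the webhook OPTIONS pre-flight so that an unreachable delivery target no longer aborts the subscribe call. Below is a minimal standalone sketch of the same fallback idea; it uses `HttpURLConnection` and an assumed `WebHook-Allowed-Origin` header name, not EventMesh's actual client code:

```java
import java.net.HttpURLConnection;
import java.net.URL;

public class PreflightCheckDemo {

    // Deny delivery only when the target explicitly rejects the origin; any failure to
    // reach the target (connection refused, timeout, unexpected runtime error) falls
    // back to "allowed" so an unreachable OPTIONS endpoint cannot block the subscribe.
    static boolean preflightAllows(String targetUrl, String origin) {
        try {
            HttpURLConnection conn = (HttpURLConnection) new URL(targetUrl).openConnection();
            conn.setRequestMethod("OPTIONS");
            conn.setConnectTimeout(3000);
            conn.connect();
            String allowed = conn.getHeaderField("WebHook-Allowed-Origin");
            return allowed == null || "*".equals(allowed) || allowed.equalsIgnoreCase(origin);
        } catch (Exception e) {          // deliberately broader than IOException
            System.err.println("pre-flight skipped for " + targetUrl + ": " + e);
            return true;
        }
    }

    public static void main(String[] args) {
        System.out.println(preflightAllows("http://127.0.0.1:1/hook", "demo")); // true
    }
}
```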
9,994 | apache/eventmesh/3046/998 | apache | eventmesh | https://github.com/apache/eventmesh/issues/998 | https://github.com/apache/eventmesh/pull/3046 | https://github.com/apache/eventmesh/pull/3046 | 1 | fixes | [Bug] Forget to override hashcode() | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Mac
### EventMesh version
master
### What happened
This class overrides equals(Object), but does not override hashCode(). It just inherits the implementation of hashCode() from java.lang.Object.
### How to reproduce
In module eventmesh-runtime
In class org.apache.eventmesh.runtime.core.protocol.tcp.client.session.Session
In method org.apache.eventmesh.runtime.core.protocol.tcp.client.session.Session.equals(Object)
In [lines 257-273]
### Debug logs
_No response_
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR! | e62b600e10ff6b08e96ecd3dc17fb6b746ffeffc | 41e37b9cf97b75cca8d6b5c74aa0f822779779b2 | https://github.com/apache/eventmesh/compare/e62b600e10ff6b08e96ecd3dc17fb6b746ffeffc...41e37b9cf97b75cca8d6b5c74aa0f822779779b2 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java
index 52b8d751b..5d2424988 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java
@@ -280,7 +280,24 @@ public class Session {
}
return true;
}
+
+ @Override
+ public int hashCode() {
+ int result = 1001; //primeNumber
+ if (null != client) {
+ result += 31 * result + Objects.hash(client);
+ }
+
+ if (null != context) {
+ result += 31 * result + Objects.hash(context);
+ }
+ if (null != sessionState) {
+ result += 31 * result + Objects.hash(sessionState);
+ }
+ return result;
+ }
+
public WeakReference<ClientGroupWrapper> getClientGroupWrapper() {
return clientGroupWrapper;
} | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,424,565 | 664,296 | 90,020 | 686 | 449 | 99 | 17 | 1 | 720 | 84 | 156 | 31 | 1 | 0 | 2023-02-04T09:25:57 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
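The record above adds the `hashCode()` that was missing next to an existing `equals()`. A minimal standalone sketch of the equals/hashCode contract, with class and field names invented for the example:

```java
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

public class ClientKeyDemo {

    static final class ClientKey {
        private final String group;
        private final String host;

        ClientKey(String group, String host) {
            this.group = group;
            this.host = host;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof ClientKey)) {
                return false;
            }
            ClientKey other = (ClientKey) o;
            return Objects.equals(group, other.group) && Objects.equals(host, other.host);
        }

        // Derived from exactly the same fields as equals(), so equal objects always
        // land in the same hash bucket of HashMap/HashSet.
        @Override
        public int hashCode() {
            return Objects.hash(group, host);
        }
    }

    public static void main(String[] args) {
        Set<ClientKey> keys = new HashSet<>();
        keys.add(new ClientKey("g1", "10.0.0.1"));
        // Without the hashCode() override this duplicate would be accepted.
        System.out.println(keys.add(new ClientKey("g1", "10.0.0.1"))); // false
    }
}
```

Deriving both methods from the same fields is what lets hash-based collections recognize logically equal instances.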
9,993 | apache/eventmesh/3112/3077 | apache | eventmesh | https://github.com/apache/eventmesh/issues/3077 | https://github.com/apache/eventmesh/pull/3112 | https://github.com/apache/eventmesh/pull/3112 | 1 | fixes | [Bug] v1.7.0 KafkaConsumerRunner.java Use break to break out of the while | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Mac
### EventMesh version
Other
### What happened
Using `break` to exit the `while` loop causes the consumer thread to stop.
### How to reproduce
.
### Debug logs
_No response_
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR! | 27f61f1559c3a2643cca539efa7c4e78172bfb61 | 2dc558bc4eccb0df450b9093fcde6e5a6bc1c156 | https://github.com/apache/eventmesh/compare/27f61f1559c3a2643cca539efa7c4e78172bfb61...2dc558bc4eccb0df450b9093fcde6e5a6bc1c156 | diff --git a/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java b/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java
index 26b86e525..300289254 100644
--- a/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java
+++ b/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java
@@ -58,8 +58,8 @@ public class KafkaConsumerRunner implements Runnable {
@Override
public void run() {
- while (!closed.get()) {
- try {
+ try {
+ while (!closed.get()) {
ConsumerRecords<String, CloudEvent> records = consumer.poll(Duration.ofMillis(10000));
// Handle new records
records.forEach(rec -> {
@@ -73,7 +73,7 @@ public class KafkaConsumerRunner implements Runnable {
case CommitMessage:
// update offset
logger.info("message commit, topic: {}, current offset:{}", topicName,
- rec.offset());
+ rec.offset());
break;
case ReconsumeLater:
// don't update offset
@@ -81,7 +81,7 @@ public class KafkaConsumerRunner implements Runnable {
case ManualAck:
// update offset
logger
- .info("message ack, topic: {}, current offset:{}", topicName, rec.offset());
+ .info("message ack, topic: {}, current offset:{}", topicName, rec.offset());
break;
default:
}
@@ -95,15 +95,14 @@ public class KafkaConsumerRunner implements Runnable {
logger.info("Error parsing cloudevents: {}", e.getMessage());
}
});
- } catch (WakeupException e) {
- // Ignore exception if closing
- if (!closed.get()) {
- throw e;
- }
- } finally {
- consumer.close();
- break;
}
+ } catch (WakeupException e) {
+ // Ignore exception if closing
+ if (!closed.get()) {
+ throw e;
+ }
+ } finally {
+ consumer.close();
}
}
| ['eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,420,711 | 663,873 | 89,939 | 686 | 947 | 147 | 23 | 1 | 424 | 69 | 103 | 29 | 1 | 0 | 2023-02-10T10:13:13 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
9,992 | apache/eventmesh/3274/3273 | apache | eventmesh | https://github.com/apache/eventmesh/issues/3273 | https://github.com/apache/eventmesh/pull/3274 | https://github.com/apache/eventmesh/pull/3274 | 1 | fixes | [Bug] Incorrect validation may cause the batch send of events to fail | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
master
### What happened
path is `org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageProcessor.java`
I think this is a bug, found while reading the code on the master branch. It will cause the batch send of events to fail.
```java
if (eventData != null || StringUtils.isBlank(batchId)
|| StringUtils.isBlank(producerGroup)
|| eventSize != eventList.size()) {
responseEventMeshCommand = asyncContext.getRequest().createHttpCommandResponse(
sendMessageBatchResponseHeader,
SendMessageBatchResponseBody.buildBody(EventMeshRetCode.EVENTMESH_PROTOCOL_BODY_ERR.getRetCode(),
EventMeshRetCode.EVENTMESH_PROTOCOL_BODY_ERR.getErrMsg()));
asyncContext.onComplete(responseEventMeshCommand);
return;
}
```
The validation of `eventData` should be changed to `== null`.
### How to reproduce
just read the code
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 8a014d4d2d003d58f612cea0d086ede7208b6f4a | 8917ab622add5d258127ad0281c175f0fad313ae | https://github.com/apache/eventmesh/compare/8a014d4d2d003d58f612cea0d086ede7208b6f4a...8917ab622add5d258127ad0281c175f0fad313ae | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageProcessor.java
index 8ae17b524..434be4c59 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageProcessor.java
@@ -177,7 +177,7 @@ public class BatchSendMessageProcessor implements HttpRequestProcessor {
eventSize = Integer.parseInt(Objects.requireNonNull(event.getExtension(SendMessageBatchRequestBody.SIZE)).toString());
CloudEventData eventData = event.getData();
- if (eventData != null || StringUtils.isBlank(batchId)
+ if (eventData == null || StringUtils.isBlank(batchId)
|| StringUtils.isBlank(producerGroup)
|| eventSize != eventList.size()) {
responseEventMeshCommand = asyncContext.getRequest().createHttpCommandResponse( | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageProcessor.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,395,895 | 664,247 | 90,218 | 688 | 133 | 26 | 2 | 1 | 1,320 | 116 | 259 | 43 | 1 | 1 | 2023-02-26T07:22:17 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
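The one-character fix above un-inverts a guard clause. A minimal standalone sketch (invented names) of the reject-early pattern the processor relies on — the guard must fire when the payload is missing, not when it is present:

```java
public class GuardClauseDemo {

    // Reject the request when the payload is MISSING; returning early on "data != null"
    // (as in the bug above) would instead reject every well-formed batch.
    static String send(String data, String batchId) {
        if (data == null || batchId == null || batchId.isEmpty()) {
            return "rejected: protocol body error";
        }
        return "accepted: " + batchId;
    }

    public static void main(String[] args) {
        System.out.println(send("payload", "batch-1")); // accepted: batch-1
        System.out.println(send(null, "batch-1"));      // rejected: protocol body error
    }
}
```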
9,991 | apache/eventmesh/3287/3284 | apache | eventmesh | https://github.com/apache/eventmesh/issues/3284 | https://github.com/apache/eventmesh/pull/3287 | https://github.com/apache/eventmesh/pull/3287 | 1 | fixes | [Bug] HTTP server HttpRequest release bug | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Mac
### EventMesh version
master
### What happened
An HttpRequest reference-counting exception occurs in handlerService.handler
```
private class HTTPHandler extends SimpleChannelInboundHandler<HttpRequest> {
......
@Override
protected void channelRead0(ChannelHandlerContext ctx, HttpRequest httpRequest) throws Exception {
if (httpRequest == null) {
return;
}
if (Objects.nonNull(handlerService) && handlerService.isProcessorWrapper(httpRequest)) {
handlerService.handler(ctx, httpRequest, asyncContextCompleteHandler);
return;
}
......
}
```
handlerService.handler has an httpRequest release bug
### How to reproduce
Test handler message.
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | ba4ebe61af548f0814000341ae8dc0452f68f89e | da8e1644f448ea51083373b6d8b91505d7d1d3a0 | https://github.com/apache/eventmesh/compare/ba4ebe61af548f0814000341ae8dc0452f68f89e...da8e1644f448ea51083373b6d8b91505d7d1d3a0 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java
index 078eaf9cb..aadfb90f6 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java
@@ -66,10 +66,10 @@ import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
-import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.epoll.EpollServerSocketChannel;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
@@ -95,6 +95,7 @@ import io.netty.handler.codec.http.multipart.InterfaceHttpData;
import io.netty.handler.ssl.SslHandler;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
+import io.netty.util.ReferenceCountUtil;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.semconv.trace.attributes.SemanticAttributes;
@@ -331,21 +332,23 @@ public abstract class AbstractHTTPServer extends AbstractRemotingServer {
}
@Sharable
- private class HTTPHandler extends SimpleChannelInboundHandler<HttpRequest> {
+ private class HTTPHandler extends ChannelInboundHandlerAdapter {
/**
* Is called for each message of type {@link HttpRequest}.
*
- * @param ctx the {@link ChannelHandlerContext} which this {@link SimpleChannelInboundHandler} belongs to
- * @param httpRequest the message to handle
+ * @param ctx the {@link ChannelHandlerContext} which this {@link ChannelInboundHandlerAdapter} belongs to
+ * @param msg the message to handle
* @throws Exception is thrown if an error occurred
*/
@Override
- protected void channelRead0(ChannelHandlerContext ctx, HttpRequest httpRequest) throws Exception {
- if (httpRequest == null) {
+ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
+ if (!(msg instanceof HttpRequest)) {
return;
}
+ HttpRequest httpRequest = (HttpRequest) msg;
+
if (Objects.nonNull(handlerService) && handlerService.isProcessorWrapper(httpRequest)) {
handlerService.handler(ctx, httpRequest, asyncContextCompleteHandler);
return;
@@ -443,7 +446,9 @@ public abstract class AbstractHTTPServer extends AbstractRemotingServer {
}
} catch (Exception ex) {
- log.error("execute AbstractHTTPServer.HTTPHandler.channelRead0 error", ex);
+ log.error("AbstractHTTPServer.HTTPHandler.channelRead error", ex);
+ } finally {
+ ReferenceCountUtil.release(httpRequest);
}
}
diff --git a/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/http/producer/EventMeshMessageProducer.java b/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/http/producer/EventMeshMessageProducer.java
index fcfcc9659..5ba3f0fea 100644
--- a/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/http/producer/EventMeshMessageProducer.java
+++ b/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/http/producer/EventMeshMessageProducer.java
@@ -98,7 +98,7 @@ class EventMeshMessageProducer extends AbstractProducerHttpClient<EventMeshMessa
final SendMessageResponseBody.ReplyMessage replyMessage = JsonUtils.parseObject(retObj.getRetMsg(),
SendMessageResponseBody.ReplyMessage.class);
return EventMeshMessage.builder()
- .content(Objects.requireNonNUll(replyMessage, "ReplyMessage must not be null").body)
+ .content(Objects.requireNonNull(replyMessage, "ReplyMessage must not be null").body)
.prop(replyMessage.properties)
.topic(replyMessage.topic).build();
} | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java', 'eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/http/producer/EventMeshMessageProducer.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,395,757 | 664,229 | 90,213 | 688 | 1,443 | 266 | 21 | 2 | 1,066 | 105 | 205 | 46 | 1 | 1 | 2023-02-27T12:27:23 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
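The fix above moves the handler to `ChannelInboundHandlerAdapter` and releases the request in a `finally` block. A minimal standalone Netty sketch of that release responsibility, assuming Netty 4.x on the classpath and a handler that terminates the pipeline:

```java
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.ReferenceCountUtil;

// A handler that consumes the message itself must release it: SimpleChannelInboundHandler
// does this automatically, ChannelInboundHandlerAdapter leaves it to the subclass.
public class ReleasingHandler extends ChannelInboundHandlerAdapter {

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        try {
            // ... inspect or dispatch msg here, without forwarding it downstream ...
        } finally {
            // Decrements the reference count exactly once, avoiding the buffer leak
            // that Netty's ResourceLeakDetector would otherwise report.
            ReferenceCountUtil.release(msg);
        }
    }
}
```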
9,990 | apache/eventmesh/3302/3306 | apache | eventmesh | https://github.com/apache/eventmesh/issues/3306 | https://github.com/apache/eventmesh/pull/3302 | https://github.com/apache/eventmesh/pull/3302 | 1 | fixes | [Bug] KafkaConsumerRunner Thread Closes Unexceptedly | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Mac
### EventMesh version
master
### What happened
See the `How to reproduce` section below.
### How to reproduce
0. Start your Kafka
1. Run the `StartUp` of EventMesh
2. Run the `SpringBootDemoApplication` in the HTTP demo
3. Run the `AsyncPublishInsatance` class in the HTTP demo to publish a message
The KafkaConsumerRunner will then fail to poll the message and close.
### Debug logs
No immediate exception logs
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 8a014d4d2d003d58f612cea0d086ede7208b6f4a | 0b0786cc67da51e58125ed623153a04d9a3f0bd4 | https://github.com/apache/eventmesh/compare/8a014d4d2d003d58f612cea0d086ede7208b6f4a...0b0786cc67da51e58125ed623153a04d9a3f0bd4 | diff --git a/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java b/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java
index eb370c459..6906474c5 100644
--- a/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java
+++ b/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java
@@ -53,12 +53,14 @@ public class ConsumerImpl {
private Set<String> topicsSet;
public ConsumerImpl(final Properties properties) {
+ // Setting the ClassLoader to null is necessary for Kafka consumer configuration
+ final ClassLoader original = Thread.currentThread().getContextClassLoader();
+ Thread.currentThread().setContextClassLoader(null);
+
Properties props = new Properties();
-
- // Other config props
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG));
- props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, CloudEventDeserializer.class);
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+ props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, CloudEventDeserializer.class);
props.put(ConsumerConfig.GROUP_ID_CONFIG, properties.getProperty(ConsumerConfig.GROUP_ID_CONFIG));
props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
@@ -67,6 +69,8 @@ public class ConsumerImpl {
kafkaConsumerRunner = new KafkaConsumerRunner(this.kafkaConsumer);
executorService = Executors.newFixedThreadPool(10);
topicsSet = new HashSet<>();
+
+ Thread.currentThread().setContextClassLoader(original);
}
public Properties attributes() {
diff --git a/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java b/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java
index bed94c650..381d1f206 100644
--- a/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java
+++ b/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java
@@ -62,6 +62,10 @@ public class KafkaConsumerRunner implements Runnable {
public void run() {
try {
while (!closed.get()) {
+ if (consumer.subscription().isEmpty()) {
+ // consumer cannot poll if it is subscribe to nothing
+ continue;
+ }
ConsumerRecords<String, CloudEvent> records = consumer.poll(Duration.ofMillis(10000));
// Handle new records
records.forEach(rec -> {
@@ -82,8 +86,7 @@ public class KafkaConsumerRunner implements Runnable {
break;
case ManualAck:
// update offset
- log
- .info("message ack, topic: {}, current offset:{}", topicName, rec.offset());
+ log.info("message ack, topic: {}, current offset:{}", topicName, rec.offset());
break;
default:
}
@@ -113,7 +116,4 @@ public class KafkaConsumerRunner implements Runnable {
closed.set(true);
consumer.wakeup();
}
-}
-
-
-
+}
\\ No newline at end of file | ['eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/KafkaConsumerRunner.java', 'eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,395,895 | 664,247 | 90,218 | 688 | 1,024 | 163 | 22 | 2 | 689 | 102 | 167 | 33 | 1 | 0 | 2023-03-01T00:01:34 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
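The fixes above keep the Kafka poll loop alive while nothing is subscribed yet and close the consumer only when the loop really ends. A minimal standalone sketch of such a poll loop — assuming the `kafka-clients` library and suitable deserializers configured in `props`, not the EventMesh connector itself:

```java
import java.time.Duration;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

public class PollLoopDemo implements Runnable {

    private final KafkaConsumer<String, String> consumer;
    private final AtomicBoolean closed = new AtomicBoolean(false);

    public PollLoopDemo(Properties props) {
        this.consumer = new KafkaConsumer<>(props);
    }

    @Override
    public void run() {
        try {
            while (!closed.get()) {
                // poll() throws IllegalStateException when nothing is subscribed or
                // assigned, so skip the call until a subscription exists.
                if (consumer.subscription().isEmpty()) {
                    continue;
                }
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(10));
                records.forEach(rec -> System.out.println(rec.topic() + "@" + rec.offset()));
            }
        } catch (WakeupException ignored) {
            // expected when shutdown() interrupts a blocking poll()
        } finally {
            consumer.close();   // close exactly once, when the loop really ends
        }
    }

    public void shutdown() {
        closed.set(true);
        consumer.wakeup();
    }
}
```

In practice a short sleep before `continue` would avoid a busy spin while the subscription set is still empty.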
9,989 | apache/eventmesh/3308/3307 | apache | eventmesh | https://github.com/apache/eventmesh/issues/3307 | https://github.com/apache/eventmesh/pull/3308 | https://github.com/apache/eventmesh/pull/3308 | 1 | fixes | [Bug] Kafka Consumer Closing Twice | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Mac
### EventMesh version
master
### What happened
It is possible that the Kafka consumer will be closed twice. It will raise an `IllegalStateException` when being closed again.
### How to reproduce
1. Run Eventmesh on Kafka
2. Call `shutdown` on the consumer implementation
The exception will be raised.
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 6636e3af2ef130a47ecd3276e14aa34f0232b583 | 7d37d232e15c9fffc986f33acb6ef3961a3d108d | https://github.com/apache/eventmesh/compare/6636e3af2ef130a47ecd3276e14aa34f0232b583...7d37d232e15c9fffc986f33acb6ef3961a3d108d | diff --git a/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java b/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java
index eb370c459..99c492d06 100644
--- a/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java
+++ b/eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java
@@ -82,7 +82,9 @@ public class ConsumerImpl {
public synchronized void shutdown() {
if (this.started.compareAndSet(true, false)) {
- this.kafkaConsumer.close();
+ // Shutdown the executor and interrupt any running tasks
+ kafkaConsumerRunner.shutdown();
+ executorService.shutdown();
}
}
| ['eventmesh-connector-plugin/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/consumer/ConsumerImpl.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,395,895 | 664,247 | 90,218 | 688 | 196 | 28 | 4 | 1 | 589 | 90 | 138 | 31 | 1 | 0 | 2023-03-01T11:07:59 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
9,988 | apache/eventmesh/3432/3431 | apache | eventmesh | https://github.com/apache/eventmesh/issues/3431 | https://github.com/apache/eventmesh/pull/3432 | https://github.com/apache/eventmesh/pull/3432 | 1 | fixes | [Bug]ProducerTopicManager throw NPE when runtime module started | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
master
### What happened
When I start the eventmesh runtime module and the ProducerTopicManager scheduler task executes, it throws a java.lang.NullPointerException:
```
2023-03-12 12:27:53,048 INFO [eventMesh-metrics-2] HTTPMetricsServer(HTTPMetricsServer.java:155) - maxPushMsgTPS: 0.0, avgPushMsgTPS: 0.0, sum: 0, sumFail: 0, sumFailRate: 0.0, maxClientLatency: 0.0, avgClientLatency: 0.0
2023-03-12 12:27:53,048 INFO [eventMesh-metrics-2] HTTPMetricsServer(HTTPMetricsServer.java:170) - batchMsgQ: 0, sendMsgQ: 0, pushMsgQ: 0, httpRetryQ: 0
2023-03-12 12:27:53,048 INFO [eventMesh-metrics-2] HTTPMetricsServer(HTTPMetricsServer.java:178) - batchAvgSend2MQCost: 0.0, avgSend2MQCost: 0.0, avgReply2MQCost: 0.0
java.lang.NullPointerException
at org.apache.eventmesh.runtime.core.protocol.http.producer.ProducerTopicManager.lambda$start$1(ProducerTopicManager.java:69)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.runAndReset$$$capture(FutureTask.java:308)
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2023-03-12 12:28:02,065 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:214) - grpc client info check
2023-03-12 12:28:02,065 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:223) - total number of ConsumerGroupClients: 0
2023-03-12 12:28:12,060 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:214) - grpc client info check
2023-03-12 12:28:12,060 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:223) - total number of ConsumerGroupClients: 0
java.lang.NullPointerException
at org.apache.eventmesh.runtime.core.protocol.http.producer.ProducerTopicManager.lambda$start$1(ProducerTopicManager.java:69)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.runAndReset$$$capture(FutureTask.java:308)
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2023-03-12 12:28:22,063 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:214) - grpc client info check
2023-03-12 12:28:22,063 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:223) - total number of ConsumerGroupClients: 0
```
### How to reproduce
start eventmesh runtime
### Debug logs
```
2023-03-12 12:27:53,048 INFO [eventMesh-metrics-2] HTTPMetricsServer(HTTPMetricsServer.java:155) - maxPushMsgTPS: 0.0, avgPushMsgTPS: 0.0, sum: 0, sumFail: 0, sumFailRate: 0.0, maxClientLatency: 0.0, avgClientLatency: 0.0
2023-03-12 12:27:53,048 INFO [eventMesh-metrics-2] HTTPMetricsServer(HTTPMetricsServer.java:170) - batchMsgQ: 0, sendMsgQ: 0, pushMsgQ: 0, httpRetryQ: 0
2023-03-12 12:27:53,048 INFO [eventMesh-metrics-2] HTTPMetricsServer(HTTPMetricsServer.java:178) - batchAvgSend2MQCost: 0.0, avgSend2MQCost: 0.0, avgReply2MQCost: 0.0
java.lang.NullPointerException
at org.apache.eventmesh.runtime.core.protocol.http.producer.ProducerTopicManager.lambda$start$1(ProducerTopicManager.java:69)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.runAndReset$$$capture(FutureTask.java:308)
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2023-03-12 12:28:02,065 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:214) - grpc client info check
2023-03-12 12:28:02,065 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:223) - total number of ConsumerGroupClients: 0
2023-03-12 12:28:12,060 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:214) - grpc client info check
2023-03-12 12:28:12,060 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:223) - total number of ConsumerGroupClients: 0
java.lang.NullPointerException
at org.apache.eventmesh.runtime.core.protocol.http.producer.ProducerTopicManager.lambda$start$1(ProducerTopicManager.java:69)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.runAndReset$$$capture(FutureTask.java:308)
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2023-03-12 12:28:22,063 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:214) - grpc client info check
2023-03-12 12:28:22,063 DEBUG [pool-4-thread-1] ConsumerManager(ConsumerManager.java:223) - total number of ConsumerGroupClients: 0
```
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 8aa97ac2df2fde85e8fbd5b0c51132f41f6447f4 | 38cb976a5ac0420f6a861be5a91393b1049ba292 | https://github.com/apache/eventmesh/compare/8aa97ac2df2fde85e8fbd5b0c51132f41f6447f4...38cb976a5ac0420f6a861be5a91393b1049ba292 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java
index dfbad0a38..5d0fd427a 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java
@@ -218,4 +218,8 @@ public class EventMeshServer {
public ProducerTopicManager getProducerTopicManager() {
return producerTopicManager;
}
+
+ public CommonConfiguration getConfiguration() {
+ return configuration;
+ }
}
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/ProducerTopicManager.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/ProducerTopicManager.java
index 785540424..5568cad59 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/ProducerTopicManager.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/ProducerTopicManager.java
@@ -23,39 +23,33 @@ import org.apache.eventmesh.common.ThreadPoolFactory;
import org.apache.eventmesh.runtime.boot.EventMeshServer;
import java.util.List;
+import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
+import lombok.extern.slf4j.Slf4j;
+@Slf4j
public class ProducerTopicManager {
- private Logger retryLogger = LoggerFactory.getLogger("p-topic-m");
-
- private Logger logger = LoggerFactory.getLogger(this.getClass());
-
private EventMeshServer eventMeshServer;
- public ProducerTopicManager(EventMeshServer eventMeshServer) {
- this.eventMeshServer = eventMeshServer;
- }
-
private transient ScheduledFuture<?> scheduledTask;
protected static ScheduledExecutorService scheduler;
- private ConcurrentHashMap<String, EventMeshServicePubTopicInfo> eventMeshServicePubTopicInfoMap = new ConcurrentHashMap<>();
+ private ConcurrentHashMap<String, EventMeshServicePubTopicInfo> eventMeshServicePubTopicInfoMap = new ConcurrentHashMap<>(64);
- public void init() {
+ public ProducerTopicManager(EventMeshServer eventMeshServer) {
+ this.eventMeshServer = eventMeshServer;
+ }
+ public void init() {
scheduler = ThreadPoolFactory.createScheduledExecutor(Runtime.getRuntime().availableProcessors(),
new EventMeshThreadFactory("Producer-Topic-Manager", true));
- logger.info("ProducerTopicManager inited......");
-
+ log.info("ProducerTopicManager inited......");
}
public void start() {
@@ -63,29 +57,28 @@ public class ProducerTopicManager {
if (scheduledTask == null) {
synchronized (ProducerTopicManager.class) {
scheduledTask = scheduler.scheduleAtFixedRate(() -> {
-
try {
- List<EventMeshServicePubTopicInfo> list = eventMeshServer.getRegistry().findEventMeshServicePubTopicInfos();
- list.forEach(e -> {
- eventMeshServicePubTopicInfoMap.put(e.getService(), e);
- });
+ if (!eventMeshServer.getConfiguration().isEventMeshServerRegistryEnable()) {
+ return;
+ }
+ List<EventMeshServicePubTopicInfo> pubTopicInfoList = eventMeshServer.getRegistry().findEventMeshServicePubTopicInfos();
+ Optional.ofNullable(pubTopicInfoList)
+ .ifPresent(lt -> lt.forEach(item -> eventMeshServicePubTopicInfoMap.put(item.getService(), item)));
} catch (Exception e) {
- e.printStackTrace();
+ log.error("ProducerTopicManager update eventMesh pub topic info error. ", e);
}
}, 5, 20, TimeUnit.SECONDS);
}
}
-
-
- logger.info("ProducerTopicManager started......");
+ log.info("ProducerTopicManager started......");
}
public void shutdown() {
if (scheduledTask != null) {
scheduledTask.cancel(false);
}
- logger.info("ProducerTopicManager shutdown......");
+ log.info("ProducerTopicManager shutdown......");
}
public ConcurrentHashMap<String, EventMeshServicePubTopicInfo> getEventMeshServicePubTopicInfoMap() {
@@ -97,4 +90,4 @@ public class ProducerTopicManager {
}
-}
\\ No newline at end of file
+}
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java
index b76339c1d..9030fae02 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java
@@ -326,19 +326,19 @@ public class Session {
public void trySendListenResponse(Header header, long startTime, long taskExecuteTime) {
if (!listenRspSend && listenRspLock.tryLock()) {
- if (!listenRspSend) {
- if (header == null) {
- header = new Header(LISTEN_RESPONSE, OPStatus.SUCCESS.getCode(), "succeed", null);
- }
- Package msg = new Package();
- msg.setHeader(header);
-
- // TODO: if startTime is modified
- Utils.writeAndFlush(msg, startTime, taskExecuteTime, context, this);
- listenRspSend = true;
+ if (!listenRspSend) {
+ if (header == null) {
+ header = new Header(LISTEN_RESPONSE, OPStatus.SUCCESS.getCode(), "succeed", null);
}
- listenRspLock.unlock();
-
+ Package msg = new Package();
+ msg.setHeader(header);
+
+ // TODO: if startTime is modified
+ Utils.writeAndFlush(msg, startTime, taskExecuteTime, context, this);
+ listenRspSend = true;
+ }
+ listenRspLock.unlock();
+
}
}
| ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/producer/ProducerTopicManager.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/tcp/client/session/Session.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 3,744,368 | 733,522 | 99,974 | 796 | 3,292 | 589 | 73 | 3 | 6,449 | 365 | 1,740 | 91 | 1 | 2 | 2023-03-12T04:54:04 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
10,002 | apache/eventmesh/2164/2163 | apache | eventmesh | https://github.com/apache/eventmesh/issues/2163 | https://github.com/apache/eventmesh/pull/2164 | https://github.com/apache/eventmesh/pull/2164 | 1 | fix | [Bug] This method needlessly uses a String literal as a Charset encoding [SendSyncMessageProcessor] | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Linux
### EventMesh version
master
### What happened
This method needlessly uses a String literal as a Charset encoding [SendSyncMessageProcessor]
### How to reproduce
In the SendSyncMessageProcessor class, line 283 should not pass the string literal "UTF-8"; use StandardCharsets.UTF_8 instead.
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | b82146640db357b9fc65c528aa983f5b53adae33 | 136a1fbe02ef2bff3d8cf66a8f36c111f42dbda9 | https://github.com/apache/eventmesh/compare/b82146640db357b9fc65c528aa983f5b53adae33...136a1fbe02ef2bff3d8cf66a8f36c111f42dbda9 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendSyncMessageProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendSyncMessageProcessor.java
index dfd79556a..436006976 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendSyncMessageProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendSyncMessageProcessor.java
@@ -281,7 +281,7 @@ public class SendSyncMessageProcessor implements HttpRequestProcessor {
String.valueOf(System.currentTimeMillis()))
.build();
final String rtnMsg = new String(Objects.requireNonNull(event.getData()).toBytes(),
- EventMeshConstants.DEFAULT_CHARSET);
+ StandardCharsets.UTF_8);
HttpCommand succ = asyncContext.getRequest().createHttpCommandResponse(
sendMessageResponseHeader, | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendSyncMessageProcessor.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,309,979 | 650,909 | 86,927 | 639 | 127 | 14 | 2 | 1 | 516 | 66 | 131 | 28 | 1 | 0 | 2022-11-11T05:29:17 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
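The diff above replaces a charset-name string with the `StandardCharsets` constant. A small standalone sketch of why the constant form is preferable:

```java
import java.nio.charset.StandardCharsets;

public class CharsetDemo {

    public static void main(String[] args) throws Exception {
        byte[] payload = "hello eventmesh".getBytes(StandardCharsets.UTF_8);

        // Charset-name string: forces a charset lookup and a checked
        // UnsupportedEncodingException at every call site.
        String viaName = new String(payload, "UTF-8");

        // Charset constant: no lookup, no checked exception, typos caught at compile time.
        String viaConstant = new String(payload, StandardCharsets.UTF_8);

        System.out.println(viaName.equals(viaConstant)); // true
    }
}
```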
9,985 | apache/eventmesh/4255/4236 | apache | eventmesh | https://github.com/apache/eventmesh/issues/4236 | https://github.com/apache/eventmesh/pull/4255 | https://github.com/apache/eventmesh/pull/4255 | 1 | fix | [Bug] TCP reconnection failed | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
master
### What happened
TCP reconnection failed
### How to reproduce
- Run the class org.apache.eventmesh.tcp.demo.sub.cloudevents.AsyncSubscribe
- Use an IDEA breakpoint to pause the client for about 30s
### Debug logs

When the TCP connection times out and the client reconnects, an error occurs. The following is the complete error message:
```
org.apache.eventmesh.client.tcp.impl.cloudevent.CloudEventTCPPubClient$cloudEventTCPPubHandler is not a @Sharable handler, so can't be added or removed multiple times.
```
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 8ad08ba1e8d7b773d77b27f83a9ea4cbabb920bb | 1ba925bdd48a57b91a77a66b5a89e501209f48fd | https://github.com/apache/eventmesh/compare/8ad08ba1e8d7b773d77b27f83a9ea4cbabb920bb...1ba925bdd48a57b91a77a66b5a89e501209f48fd | diff --git a/eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPPubClient.java b/eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPPubClient.java
index ed98bbce4..35f2186b3 100644
--- a/eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPPubClient.java
+++ b/eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPPubClient.java
@@ -37,6 +37,7 @@ import io.cloudevents.CloudEvent;
import io.cloudevents.core.format.EventFormat;
import io.cloudevents.core.provider.EventFormatProvider;
import io.cloudevents.jackson.JsonFormat;
+import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import com.google.common.base.Preconditions;
@@ -138,6 +139,7 @@ class CloudEventTCPPubClient extends TcpClient implements EventMeshTCPPubClient<
}
}
+ @Sharable
private class CloudEventTCPPubHandler extends AbstractEventMeshTCPPubHandler<CloudEvent> {
public CloudEventTCPPubHandler(ConcurrentHashMap<Object, RequestContext> contexts) {
diff --git a/eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPSubClient.java b/eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPSubClient.java
index dbebc6dcc..3e9adb3b5 100644
--- a/eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPSubClient.java
+++ b/eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPSubClient.java
@@ -44,6 +44,7 @@ import io.cloudevents.CloudEvent;
import io.cloudevents.core.format.EventFormat;
import io.cloudevents.core.provider.EventFormatProvider;
import io.cloudevents.jackson.JsonFormat;
+import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import com.google.common.base.Preconditions;
@@ -141,6 +142,7 @@ class CloudEventTCPSubClient extends TcpClient implements EventMeshTCPSubClient<
}
}
+ @Sharable
private class CloudEventTCPSubHandler extends AbstractEventMeshTCPSubHandler<CloudEvent> {
public CloudEventTCPSubHandler( | ['eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPSubClient.java', 'eventmesh-sdks/eventmesh-sdk-java/src/main/java/org/apache/eventmesh/client/tcp/impl/cloudevent/CloudEventTCPPubClient.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,352,761 | 662,665 | 88,093 | 768 | 128 | 30 | 4 | 2 | 848 | 95 | 220 | 35 | 2 | 1 | 2023-07-20T02:33:42 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
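The fix above marks the reused TCP handlers as `@Sharable`. A minimal standalone Netty sketch (handler name invented) of a handler that can safely be added to a new pipeline on every reconnect:

```java
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;

// A client that reuses one handler instance across reconnects must mark it @Sharable,
// otherwise Netty rejects the second connect with "is not a @Sharable handler, so
// can't be added or removed multiple times". Keep such a handler free of per-channel state.
@Sharable
public class ReconnectSafeHandler extends ChannelInboundHandlerAdapter {

    @Override
    public void channelActive(ChannelHandlerContext ctx) {
        System.out.println("connected: " + ctx.channel().remoteAddress());
        ctx.fireChannelActive();
    }
}
```

The alternative is to create a fresh handler instance inside the `ChannelInitializer` for each connect, which avoids the annotation but also avoids accidentally shared state.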
10,000 | apache/eventmesh/2626/981 | apache | eventmesh | https://github.com/apache/eventmesh/issues/981 | https://github.com/apache/eventmesh/pull/2626 | https://github.com/apache/eventmesh/pull/2626 | 1 | fixes | [Bug] Possible null pointer dereference | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Mac
### EventMesh version
master
### What happened
The return value from a method is dereferenced without a null check, and the return value of that method is one that should generally be checked for null. This may lead to a NullPointerException when the code is executed.
The bug appears many times in the package org.apache.eventmesh.runtime.core.protocol.http.processor.
### How to reproduce
It will happen if the return value is null.
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 26288e67dc6aac34cc0b0ee4de8db80ce63f8d46 | 9befcad3417c38b4dece0375f21944011e2b3f2d | https://github.com/apache/eventmesh/compare/26288e67dc6aac34cc0b0ee4de8db80ce63f8d46...9befcad3417c38b4dece0375f21944011e2b3f2d | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageV2Processor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageV2Processor.java
index a9935323b..482a4afc5 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageV2Processor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageV2Processor.java
@@ -63,7 +63,7 @@ public class BatchSendMessageV2Processor implements HttpRequestProcessor {
public Logger aclLogger = LoggerFactory.getLogger("acl");
- private EventMeshHTTPServer eventMeshHTTPServer;
+ private final EventMeshHTTPServer eventMeshHTTPServer;
public BatchSendMessageV2Processor(EventMeshHTTPServer eventMeshHTTPServer) {
this.eventMeshHTTPServer = eventMeshHTTPServer;
@@ -175,12 +175,9 @@ public class BatchSendMessageV2Processor implements HttpRequestProcessor {
//do acl check
if (eventMeshHTTPServer.getEventMeshHttpConfiguration().isEventMeshServerSecurityEnable()) {
String remoteAddr = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
- String user = event.getExtension(ProtocolKey.ClientInstanceKey.USERNAME) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.USERNAME).toString();
- String pass = event.getExtension(ProtocolKey.ClientInstanceKey.PASSWD) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.PASSWD).toString();
- String subsystem = event.getExtension(ProtocolKey.ClientInstanceKey.SYS) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.SYS).toString();
+ String user = getExtension(event, ProtocolKey.ClientInstanceKey.USERNAME);
+ String pass = getExtension(event, ProtocolKey.ClientInstanceKey.PASSWD);
+ String subsystem = getExtension(event, ProtocolKey.ClientInstanceKey.SYS);
try {
Acl.doAclCheckInHttpSend(remoteAddr, user, pass, subsystem, topic, requestCode);
} catch (Exception e) {
@@ -226,7 +223,7 @@ public class BatchSendMessageV2Processor implements HttpRequestProcessor {
String defaultTTL = String.valueOf(EventMeshConstants.DEFAULT_MSG_TTL_MILLS);
// todo: use hashmap to avoid copy
- String ttlValue = event.getExtension(SendMessageRequestBody.TTL) == null ? "" : event.getExtension(SendMessageRequestBody.TTL).toString();
+ String ttlValue = getExtension(event, SendMessageRequestBody.TTL);
if (StringUtils.isBlank(ttlValue) && !StringUtils.isNumeric(ttlValue)) {
event = CloudEventBuilder.from(event).withExtension(SendMessageRequestBody.TTL, defaultTTL)
.build();
@@ -309,6 +306,11 @@ public class BatchSendMessageV2Processor implements HttpRequestProcessor {
}
+ private String getExtension(CloudEvent event, String protocolKey) {
+ Object extension = event.getExtension(protocolKey);
+ return Objects.isNull(extension) ? "" : extension.toString();
+ }
+
@Override
public boolean rejectRequest() {
return false;
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/HandlerService.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/HandlerService.java
index f69869674..75dc3aaa5 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/HandlerService.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/HandlerService.java
@@ -38,6 +38,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
+import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadPoolExecutor;
@@ -187,8 +188,12 @@ public class HandlerService {
if (length > 0) {
byte[] body = new byte[length];
fullHttpRequest.content().readBytes(body);
- JsonUtils.deserialize(new String(body, Constants.DEFAULT_CHARSET), new TypeReference<Map<String, Object>>() {
- }).forEach(bodyMap::put);
+ Optional
+ .ofNullable(JsonUtils.deserialize(
+ new String(body, Constants.DEFAULT_CHARSET),
+ new TypeReference<Map<String, Object>>() {}
+ ))
+ .ifPresent(bodyMap::putAll);
}
} else {
HttpPostRequestDecoder decoder =
@@ -206,7 +211,10 @@ public class HandlerService {
throw new RuntimeException("UnSupported Method " + fullHttpRequest.method());
}
- byte[] requestBody = JsonUtils.serialize(bodyMap).getBytes(StandardCharsets.UTF_8);
+ byte[] requestBody = Optional.ofNullable(JsonUtils.serialize(bodyMap))
+ .map(s -> s.getBytes(StandardCharsets.UTF_8))
+ .orElse(new byte[0]);
+
httpEventWrapper.setBody(requestBody);
metrics.getSummaryMetrics().recordDecodeTimeCost(System.currentTimeMillis() - bodyDecodeStart);
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalSubscribeEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalSubscribeEventProcessor.java
index fe09d92f2..94b14d919 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalSubscribeEventProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalSubscribeEventProcessor.java
@@ -41,12 +41,13 @@ import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import org.slf4j.Logger;
@@ -119,8 +120,10 @@ public class LocalSubscribeEventProcessor extends AbstractEventProcessor impleme
//validate body
byte[] requestBody = requestWrapper.getBody();
- Map<String, Object> requestBodyMap = JsonUtils.deserialize(new String(requestBody, Constants.DEFAULT_CHARSET),
- new TypeReference<HashMap<String, Object>>() {});
+ Map<String, Object> requestBodyMap = Optional.ofNullable(JsonUtils.deserialize(
+ new String(requestBody, Constants.DEFAULT_CHARSET),
+ new TypeReference<HashMap<String, Object>>() {}
+ )).orElse(new HashMap<>());
if (requestBodyMap.get("url") == null || requestBodyMap.get("topic") == null || requestBodyMap.get("consumerGroup") == null) {
handlerSpecific.sendErrorResponse(EventMeshRetCode.EVENTMESH_PROTOCOL_BODY_ERR, responseHeaderMap,
@@ -133,8 +136,10 @@ public class LocalSubscribeEventProcessor extends AbstractEventProcessor impleme
String topic = JsonUtils.serialize(requestBodyMap.get("topic"));
// SubscriptionItem
- List<SubscriptionItem> subscriptionList = JsonUtils.deserialize(topic, new TypeReference<List<SubscriptionItem>>() {
- });
+ List<SubscriptionItem> subscriptionList = Optional.ofNullable(JsonUtils.deserialize(
+ topic,
+ new TypeReference<List<SubscriptionItem>>() {}
+ )).orElse(Collections.emptyList());
//do acl check
if (eventMeshHTTPServer.getEventMeshHttpConfiguration().isEventMeshServerSecurityEnable()) {
@@ -213,7 +218,7 @@ public class LocalSubscribeEventProcessor extends AbstractEventProcessor impleme
consumeTopicConfig.setConsumerGroup(consumerGroup);
consumeTopicConfig.setTopic(subTopic.getTopic());
consumeTopicConfig.setSubscriptionItem(subTopic);
- consumeTopicConfig.setUrls(new HashSet<>(Arrays.asList(url)));
+ consumeTopicConfig.setUrls(new HashSet<>(Collections.singletonList(url)));
consumeTopicConfig.setIdcUrls(idcUrls);
@@ -230,7 +235,7 @@ public class LocalSubscribeEventProcessor extends AbstractEventProcessor impleme
newTopicConf.setConsumerGroup(consumerGroup);
newTopicConf.setTopic(subTopic.getTopic());
newTopicConf.setSubscriptionItem(subTopic);
- newTopicConf.setUrls(new HashSet<>(Arrays.asList(url)));
+ newTopicConf.setUrls(new HashSet<>(Collections.singletonList(url)));
newTopicConf.setIdcUrls(idcUrls);
map.put(subTopic.getTopic(), newTopicConf);
}
@@ -244,7 +249,7 @@ public class LocalSubscribeEventProcessor extends AbstractEventProcessor impleme
latestTopicConf.setConsumerGroup(consumerGroup);
latestTopicConf.setTopic(subTopic.getTopic());
latestTopicConf.setSubscriptionItem(subTopic);
- latestTopicConf.setUrls(new HashSet<>(Arrays.asList(url)));
+ latestTopicConf.setUrls(new HashSet<>(Collections.singletonList(url)));
ConsumerGroupTopicConf currentTopicConf = set.getValue();
latestTopicConf.getUrls().addAll(currentTopicConf.getUrls());
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalUnSubscribeEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalUnSubscribeEventProcessor.java
index dff668f64..d19aa49d2 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalUnSubscribeEventProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalUnSubscribeEventProcessor.java
@@ -37,12 +37,14 @@ import org.apache.eventmesh.runtime.util.RemotingHelper;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import org.slf4j.Logger;
@@ -52,6 +54,7 @@ import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.HttpRequest;
import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.collect.Maps;
@EventMeshTrace(isEnable = false)
public class LocalUnSubscribeEventProcessor extends AbstractEventProcessor implements AsyncHttpProcessor {
@@ -112,9 +115,10 @@ public class LocalUnSubscribeEventProcessor extends AbstractEventProcessor imple
//validate body
byte[] requestBody = requestWrapper.getBody();
- Map<String, Object> requestBodyMap = JsonUtils.deserialize(new String(requestBody, Constants.DEFAULT_CHARSET),
- new TypeReference<HashMap<String, Object>>() {
- });
+ Map<String, Object> requestBodyMap = Optional.ofNullable(JsonUtils.deserialize(
+ new String(requestBody, Constants.DEFAULT_CHARSET),
+ new TypeReference<HashMap<String, Object>>() {}
+ )).orElse(Maps.newHashMap());
if (requestBodyMap.get(EventMeshConstants.URL) == null
|| requestBodyMap.get(EventMeshConstants.MANAGE_TOPIC) == null
@@ -129,8 +133,10 @@ public class LocalUnSubscribeEventProcessor extends AbstractEventProcessor imple
String topic = JsonUtils.serialize(requestBodyMap.get(EventMeshConstants.MANAGE_TOPIC));
// unSubscriptionItem
- List<String> unSubTopicList = JsonUtils.deserialize(topic, new TypeReference<List<String>>() {
- });
+ List<String> unSubTopicList = Optional.ofNullable(JsonUtils.deserialize(
+ topic,
+ new TypeReference<List<String>>() {}
+ )).orElse(Collections.emptyList());
String env = sysHeaderMap.get(ProtocolKey.ClientInstanceKey.ENV).toString();
String idc = sysHeaderMap.get(ProtocolKey.ClientInstanceKey.IDC).toString();
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteSubscribeEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteSubscribeEventProcessor.java
index 83c5c97cd..96cc3c37a 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteSubscribeEventProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteSubscribeEventProcessor.java
@@ -46,9 +46,11 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,6 +60,7 @@ import io.netty.handler.codec.http.HttpRequest;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
@EventMeshTrace(isEnable = false)
public class RemoteSubscribeEventProcessor extends AbstractEventProcessor implements AsyncHttpProcessor {
@@ -122,9 +125,10 @@ public class RemoteSubscribeEventProcessor extends AbstractEventProcessor implem
//validate body
byte[] requestBody = requestWrapper.getBody();
- Map<String, Object> requestBodyMap = JsonUtils.deserialize(new String(requestBody, Constants.DEFAULT_CHARSET),
- new TypeReference<HashMap<String, Object>>() {
- });
+ Map<String, Object> requestBodyMap = Optional.ofNullable(JsonUtils.deserialize(
+ new String(requestBody, Constants.DEFAULT_CHARSET),
+ new TypeReference<HashMap<String, Object>>() {}
+ )).orElse(Maps.newHashMap());
if (requestBodyMap.get(EventMeshConstants.URL) == null
@@ -141,8 +145,10 @@ public class RemoteSubscribeEventProcessor extends AbstractEventProcessor implem
// SubscriptionItem
- List<SubscriptionItem> subscriptionList = JsonUtils.deserialize(topic, new TypeReference<List<SubscriptionItem>>() {
- });
+ List<SubscriptionItem> subscriptionList = Optional.ofNullable(JsonUtils.deserialize(
+ topic,
+ new TypeReference<List<SubscriptionItem>>() {}
+ )).orElse(Collections.emptyList());
//do acl check
if (eventMeshHTTPServer.getEventMeshHttpConfiguration().isEventMeshServerSecurityEnable()) {
@@ -236,8 +242,10 @@ public class RemoteSubscribeEventProcessor extends AbstractEventProcessor implem
String remoteResult = post(closeableHttpClient, targetMesh, remoteHeaderMap, remoteBodyMap,
response -> EntityUtils.toString(response.getEntity(), Constants.DEFAULT_CHARSET));
- Map<String, String> remoteResultMap = JsonUtils.deserialize(remoteResult, new TypeReference<Map<String, String>>() {
- });
+ Map<String, String> remoteResultMap = Optional.ofNullable(JsonUtils.deserialize(
+ remoteResult,
+ new TypeReference<Map<String, String>>() {}
+ )).orElse(Maps.newHashMap());
if (String.valueOf(EventMeshRetCode.SUCCESS.getRetCode()).equals(remoteResultMap.get(EventMeshConstants.RET_CODE))) {
responseBodyMap.put(EventMeshConstants.RET_CODE, EventMeshRetCode.SUCCESS.getRetCode());
@@ -286,7 +294,7 @@ public class RemoteSubscribeEventProcessor extends AbstractEventProcessor implem
//body
if (MapUtils.isNotEmpty(requestBody)) {
- String jsonStr = JsonUtils.serialize(requestBody);
+ String jsonStr = Optional.ofNullable(JsonUtils.serialize(requestBody)).orElse("");
httpPost.setEntity(new StringEntity(jsonStr, ContentType.APPLICATION_JSON));
}
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java
index 8c29ddcc0..6e61c5b47 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java
@@ -44,10 +44,12 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Optional;
import java.util.stream.Collectors;
import org.slf4j.Logger;
@@ -58,6 +60,7 @@ import io.netty.handler.codec.http.HttpRequest;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
@EventMeshTrace(isEnable = false)
public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor implements AsyncHttpProcessor {
@@ -119,9 +122,10 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
//validate body
byte[] requestBody = requestWrapper.getBody();
- Map<String, Object> requestBodyMap = JsonUtils.deserialize(new String(requestBody, Constants.DEFAULT_CHARSET),
- new TypeReference<HashMap<String, Object>>() {
- });
+ Map<String, Object> requestBodyMap = Optional.ofNullable(JsonUtils.deserialize(
+ new String(requestBody, Constants.DEFAULT_CHARSET),
+ new TypeReference<HashMap<String, Object>>() {}
+ )).orElse(Maps.newHashMap());
if (requestBodyMap.get(EventMeshConstants.URL) == null
|| requestBodyMap.get(EventMeshConstants.MANAGE_TOPIC) == null
@@ -165,9 +169,10 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
remoteBodyMap.put(EventMeshConstants.CONSUMER_GROUP, meshGroup);
remoteBodyMap.put(EventMeshConstants.MANAGE_TOPIC, requestBodyMap.get(EventMeshConstants.MANAGE_TOPIC));
- List<String> unSubTopicList =
- JsonUtils.deserialize(JsonUtils.serialize(requestBodyMap.get(EventMeshConstants.MANAGE_TOPIC)), new TypeReference<List<String>>() {
- });
+ List<String> unSubTopicList = Optional.ofNullable(JsonUtils.deserialize(
+ JsonUtils.serialize(requestBodyMap.get(EventMeshConstants.MANAGE_TOPIC)),
+ new TypeReference<List<String>>() {}
+ )).orElse(Collections.emptyList());
String targetMesh = "";
if (!Objects.isNull(requestBodyMap.get("remoteMesh"))) {
@@ -190,8 +195,10 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
String remoteResult = post(closeableHttpClient, targetMesh, remoteHeaderMap, remoteBodyMap,
response -> EntityUtils.toString(response.getEntity(), Constants.DEFAULT_CHARSET));
- Map<String, String> remoteResultMap = JsonUtils.deserialize(remoteResult, new TypeReference<Map<String, String>>() {
- });
+ Map<String, String> remoteResultMap = Optional.ofNullable(JsonUtils.deserialize(
+ remoteResult,
+ new TypeReference<Map<String, String>>() {}
+ )).orElse(Maps.newHashMap());
if (String.valueOf(EventMeshRetCode.SUCCESS.getRetCode()).equals(remoteResultMap.get(EventMeshConstants.RET_CODE))) {
responseBodyMap.put(EventMeshConstants.RET_CODE, EventMeshRetCode.SUCCESS.getRetCode());
@@ -238,7 +245,7 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
//body
if (MapUtils.isNotEmpty(requestBody)) {
- String jsonStr = JsonUtils.serialize(requestBody);
+ String jsonStr = Optional.ofNullable(JsonUtils.serialize(requestBody)).orElse("");
httpPost.setEntity(new StringEntity(jsonStr, ContentType.APPLICATION_JSON));
}
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/ReplyMessageProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/ReplyMessageProcessor.java
index b201f7158..82492b5f3 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/ReplyMessageProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/ReplyMessageProcessor.java
@@ -48,6 +48,7 @@ import org.apache.commons.lang3.StringUtils;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
+import java.util.Optional;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
@@ -182,8 +183,9 @@ public class ReplyMessageProcessor implements HttpRequestProcessor {
String origTopic = event.getSubject();
- final String replyMQCluster = event.getExtension(EventMeshConstants.PROPERTY_MESSAGE_CLUSTER) == null ? "" :
- event.getExtension(EventMeshConstants.PROPERTY_MESSAGE_CLUSTER).toString();
+ final String replyMQCluster = Optional.ofNullable(event.getExtension(EventMeshConstants.PROPERTY_MESSAGE_CLUSTER))
+ .map(Objects::toString)
+ .orElse("");
if (!org.apache.commons.lang3.StringUtils.isEmpty(replyMQCluster)) {
replyTopic = replyMQCluster + "-" + replyTopic;
} else {
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncRemoteEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncRemoteEventProcessor.java
index 81f062374..ed1726e71 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncRemoteEventProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncRemoteEventProcessor.java
@@ -48,6 +48,8 @@ import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
@@ -60,6 +62,7 @@ import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.HttpRequest;
import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.collect.Maps;
@EventMeshTrace(isEnable = true)
public class SendAsyncRemoteEventProcessor implements AsyncHttpProcessor {
@@ -70,7 +73,7 @@ public class SendAsyncRemoteEventProcessor implements AsyncHttpProcessor {
public Logger aclLogger = LoggerFactory.getLogger(EventMeshConstants.ACL);
- private EventMeshHTTPServer eventMeshHTTPServer;
+ private final EventMeshHTTPServer eventMeshHTTPServer;
public SendAsyncRemoteEventProcessor(EventMeshHTTPServer eventMeshHTTPServer) {
this.eventMeshHTTPServer = eventMeshHTTPServer;
@@ -114,9 +117,10 @@ public class SendAsyncRemoteEventProcessor implements AsyncHttpProcessor {
requestWrapper.buildSysHeaderForCE();
// process remote event body
- Map<String, Object> bodyMap = JsonUtils.deserialize(new String(requestWrapper.getBody(), Constants.DEFAULT_CHARSET),
- new TypeReference<Map<String, Object>>() {
- });
+ Map<String, Object> bodyMap = Optional.ofNullable(JsonUtils.deserialize(
+ new String(requestWrapper.getBody(), Constants.DEFAULT_CHARSET),
+ new TypeReference<Map<String, Object>>() {}
+ )).orElse(Maps.newHashMap());
byte[] convertedBody = bodyMap.get("content").toString().getBytes(StandardCharsets.UTF_8);
requestWrapper.setBody(convertedBody);
@@ -174,12 +178,9 @@ public class SendAsyncRemoteEventProcessor implements AsyncHttpProcessor {
return;
}
- idc = event.getExtension(ProtocolKey.ClientInstanceKey.IDC) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.IDC).toString();
- String pid = event.getExtension(ProtocolKey.ClientInstanceKey.PID) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.PID).toString();
- String sys = event.getExtension(ProtocolKey.ClientInstanceKey.SYS) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.SYS).toString();
+ idc = getExtension(event, ProtocolKey.ClientInstanceKey.IDC);
+ String pid = getExtension(event, ProtocolKey.ClientInstanceKey.PID);
+ String sys = getExtension(event, ProtocolKey.ClientInstanceKey.SYS);
//validate event-extension
if (StringUtils.isBlank(idc)
@@ -191,8 +192,7 @@ public class SendAsyncRemoteEventProcessor implements AsyncHttpProcessor {
return;
}
- String producerGroup = event.getExtension(ProtocolKey.ClientInstanceKey.PRODUCERGROUP) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.PRODUCERGROUP).toString();
+ String producerGroup = getExtension(event, ProtocolKey.ClientInstanceKey.PRODUCERGROUP);
String topic = event.getSubject();
//validate body
@@ -209,13 +209,9 @@ public class SendAsyncRemoteEventProcessor implements AsyncHttpProcessor {
//do acl check
if (eventMeshHTTPServer.getEventMeshHttpConfiguration().isEventMeshServerSecurityEnable()) {
String remoteAddr = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
- String user = event.getExtension(ProtocolKey.ClientInstanceKey.USERNAME) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.USERNAME).toString();
- String pass = event.getExtension(ProtocolKey.ClientInstanceKey.PASSWD) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.PASSWD).toString();
- String subsystem =
- event.getExtension(ProtocolKey.ClientInstanceKey.SYS) == null ? "" :
- event.getExtension(ProtocolKey.ClientInstanceKey.SYS).toString();
+ String user = getExtension(event, ProtocolKey.ClientInstanceKey.USERNAME);
+ String pass = getExtension(event, ProtocolKey.ClientInstanceKey.PASSWD);
+ String subsystem = getExtension(event, ProtocolKey.ClientInstanceKey.SYS);
String requestURI = requestWrapper.getRequestURI();
try {
Acl.doAclCheckInHttpSend(remoteAddr, user, pass, subsystem, topic, requestURI);
@@ -319,6 +315,12 @@ public class SendAsyncRemoteEventProcessor implements AsyncHttpProcessor {
}
}
+ private String getExtension(CloudEvent event, String protocolKey) {
+ return Optional.ofNullable(event.getExtension(protocolKey))
+ .map(Objects::toString)
+ .orElse("");
+ }
+
@Override
public String[] paths() {
return new String[]{RequestURI.PUBLISH_BRIDGE.getRequestURI()};
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/inf/AbstractEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/inf/AbstractEventProcessor.java
index da0aa72ea..08a66752d 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/inf/AbstractEventProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/inf/AbstractEventProcessor.java
@@ -35,10 +35,13 @@ import org.apache.commons.lang3.StringUtils;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.collect.Maps;
+
/**
* EventProcessor
*/
@@ -121,7 +124,9 @@ public class AbstractEventProcessor {
ConsumerGroupMetadata consumerGroupMetadata =
JsonUtils.deserialize(topicMetadataJson, ConsumerGroupMetadata.class);
Map<String, ConsumerGroupTopicMetadata> consumerGroupTopicMetadataMap =
- consumerGroupMetadata.getConsumerGroupTopicMetadataMap();
+ Optional.ofNullable(consumerGroupMetadata)
+ .map(ConsumerGroupMetadata::getConsumerGroupTopicMetadataMap)
+ .orElse(Maps.newConcurrentMap());
for (SubscriptionItem subscriptionItem : subscriptionList) {
if (consumerGroupTopicMetadataMap.containsKey(subscriptionItem.getTopic())) { | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/HandlerService.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncRemoteEventProcessor.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalUnSubscribeEventProcessor.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/inf/AbstractEventProcessor.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/ReplyMessageProcessor.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/LocalSubscribeEventProcessor.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/BatchSendMessageV2Processor.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteSubscribeEventProcessor.java'] | {'.java': 9} | 9 | 9 | 0 | 0 | 9 | 3,329,516 | 648,163 | 86,603 | 642 | 11,032 | 1,980 | 171 | 9 | 721 | 110 | 159 | 30 | 1 | 0 | 2022-12-18T20:36:26 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
10,001 | apache/eventmesh/2330/2329 | apache | eventmesh | https://github.com/apache/eventmesh/issues/2329 | https://github.com/apache/eventmesh/pull/2330 | https://github.com/apache/eventmesh/pull/2330 | 1 | fixes | [Bug] usage "in.array()" in Codec.java will cause exception | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
master
### What happened
When an exception happens while the Decoder is decoding the inbound stream, the code at org/apache/eventmesh/common/protocol/tcp/codec/Codec.java:137 is executed to print out a log. However, because Decoder extends ReplayingDecoder, the inbound stream it receives is a ReplayingDecoderByteBuf, which throws UnsupportedOperationException when its array() method is called, so a second exception is raised at this point.
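For illustration, a minimal Netty decoder sketch — the class name and decoding step below are made up, not EventMesh's actual Codec — showing why the catch block must not touch `in.array()` and can simply log the exception instead:
```java
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;

import java.util.List;

public class SafeLoggingDecoder extends ReplayingDecoder<Void> {

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        try {
            out.add(in.readInt()); // placeholder for the real header/body decoding
        } catch (Exception e) {
            // Do NOT call in.array() here: ReplayingDecoder passes a
            // ReplayingDecoderByteBuf, whose array() throws
            // UnsupportedOperationException and hides the original error.
            System.err.println("tcp decoder error: " + e);
            throw e;
        }
    }
}
```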
### How to reproduce
1. Trigger an exception while the Decoder is decoding the inbound stream.
2. The exception is then caught and shown in the console.
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 3c954d838a3c23527444622018bf8882610328e8 | 45dff0e01c97b148fa2b5578523e0de8305e4d09 | https://github.com/apache/eventmesh/compare/3c954d838a3c23527444622018bf8882610328e8...45dff0e01c97b148fa2b5578523e0de8305e4d09 | diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/tcp/codec/Codec.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/tcp/codec/Codec.java
index 296d6c50a..16cc962eb 100644
--- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/tcp/codec/Codec.java
+++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/tcp/codec/Codec.java
@@ -29,7 +29,6 @@ import org.apache.eventmesh.common.utils.JsonUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
-import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
import java.util.TimeZone;
@@ -134,7 +133,7 @@ public class Codec {
Package pkg = new Package(header, body);
out.add(pkg);
} catch (Exception e) {
- log.error("decode error| receive: {}.", deserializeBytes(in.array()));
+ log.error("tcp decoder error: ", e);
throw e;
}
} | ['eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/tcp/codec/Codec.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,308,414 | 650,537 | 86,948 | 640 | 175 | 34 | 3 | 1 | 904 | 127 | 193 | 29 | 1 | 0 | 2022-12-01T07:14:44 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
10,003 | apache/eventmesh/1819/1818 | apache | eventmesh | https://github.com/apache/eventmesh/issues/1818 | https://github.com/apache/eventmesh/pull/1819 | https://github.com/apache/eventmesh/pull/1819 | 1 | fixes | [Bug] Fix IOException in SSLContextFactory | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Mac
### EventMesh version
master
### What happened
Unreported exception IOException must be caught or declared to be thrown.
### How to reproduce
When inputStream.close() is not enclosed in a try-catch block, IOException occurs
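As a side note, try-with-resources is an alternative to wrapping `close()` in its own try/catch inside `finally`: the stream is closed automatically and an IOException from `close()` becomes a suppressed exception instead of escaping. A minimal sketch, with an illustrative path and password rather than the project's actual configuration fields:
```java
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyStore;

public class KeyStoreLoader {

    // Illustrative only: path and password would come from configuration.
    static KeyStore load(String keyStorePath, char[] password) {
        try (InputStream in = Files.newInputStream(Paths.get(keyStorePath))) {
            KeyStore keyStore = KeyStore.getInstance("JKS");
            keyStore.load(in, password);
            return keyStore;
        } catch (Exception e) {
            // "in" is closed automatically; a failure in close() no longer escapes.
            System.err.println("keystore init failed: " + e);
            return null;
        }
    }
}
```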
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | fcac8f39fef729b239a6c0c3a269cd4d0fe35da8 | 689bc06c38cf5a583765b42c8ae45e59f943ea24 | https://github.com/apache/eventmesh/compare/fcac8f39fef729b239a6c0c3a269cd4d0fe35da8...689bc06c38cf5a583765b42c8ae45e59f943ea24 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java
index 446155a1b..f90c1521b 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java
@@ -25,6 +25,7 @@ import org.apache.eventmesh.runtime.constants.EventMeshConstants;
import org.apache.commons.lang3.StringUtils;
import java.io.File;
+import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
@@ -49,7 +50,7 @@ public class SSLContextFactory {
public static SSLContext getSslContext(EventMeshHTTPConfiguration eventMeshHttpConfiguration) {
SSLContext sslContext;
- InputStream inputStream;
+ InputStream inputStream = null;
try {
protocol = eventMeshHttpConfiguration.eventMeshServerSSLProtocol;
@@ -64,7 +65,7 @@ public class SSLContextFactory {
KeyStore keyStore = KeyStore.getInstance("JKS");
inputStream = Files.newInputStream(Paths.get(EventMeshConstants.EVENTMESH_CONF_HOME
+ File.separator
- + fileName), StandardOpenOption.READ)
+ + fileName), StandardOpenOption.READ);
keyStore.load(inputStream, filePass);
KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
kmf.init(keyStore, filePass);
@@ -72,9 +73,14 @@ public class SSLContextFactory {
} catch (Exception e) {
httpLogger.warn("sslContext init failed", e);
sslContext = null;
- } finally {
+ }
+ finally {
if (inputStream != null) {
- inputStream.close();
+ try{
+ inputStream.close();
+ }catch(IOException e){
+ httpLogger.warn("IOException found", e);
+ }
}
}
return sslContext; | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,214,206 | 632,154 | 84,506 | 613 | 548 | 71 | 14 | 1 | 509 | 75 | 119 | 29 | 1 | 0 | 2022-10-24T11:46:19 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
10,004 | apache/eventmesh/1631/1627 | apache | eventmesh | https://github.com/apache/eventmesh/issues/1627 | https://github.com/apache/eventmesh/pull/1631 | https://github.com/apache/eventmesh/pull/1631 | 1 | fixes | [Bug] ConsumerGroup subscribes multiple topics, only first topic can invoke url | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
master
### What happened
When one consumer group subscribes to multiple topics, only the first subscription can invoke its URL.
### How to reproduce
Subscribe to multiple topics with one consumer group, then publish an event to the second topic. You will find that the first topic works, but nothing happens for the second topic.
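For illustration, a plain-Java sketch of the stale-reference pattern behind this behaviour (hypothetical class names, not EventMesh's actual types): the manager replaces its config object, but the consumer it already holds keeps serving the old one unless it is updated too.
```java
import java.util.ArrayList;
import java.util.List;

class GroupConfig {
    final List<String> topics = new ArrayList<>();
}

class Consumer {
    private GroupConfig config;
    Consumer(GroupConfig config) { this.config = config; }
    void setConfig(GroupConfig config) { this.config = config; }
    List<String> topics() { return config.topics; }
}

class GroupManager {
    private GroupConfig config = new GroupConfig();
    private final Consumer consumer = new Consumer(config);

    void refresh(GroupConfig updated) {
        this.config = updated;
        // Without the next line the consumer keeps the old config, so only the
        // topics present at the time of the first subscription are ever served.
        consumer.setConfig(updated);
    }
}
```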
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | ec733b797712ade73b97f11c25fd0bbd797afb43 | 5c691da1d161570e316bd9d94f90e42fcc71bac6 | https://github.com/apache/eventmesh/compare/ec733b797712ade73b97f11c25fd0bbd797afb43...5c691da1d161570e316bd9d94f90e42fcc71bac6 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/ConsumerGroupManager.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/ConsumerGroupManager.java
index 7d4ddab9b..640ed86ac 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/ConsumerGroupManager.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/ConsumerGroupManager.java
@@ -78,6 +78,7 @@ public class ConsumerGroupManager {
}
this.consumerGroupConfig = consumerGroupConfig;
+ this.eventMeshConsumer.setConsumerGroupConf(consumerGroupConfig);
init();
start();
}
diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/EventMeshConsumer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/EventMeshConsumer.java
index c571413a8..a4d57f291 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/EventMeshConsumer.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/EventMeshConsumer.java
@@ -298,6 +298,10 @@ public class EventMeshConsumer {
return consumerGroupConf;
}
+ public void setConsumerGroupConf(ConsumerGroupConf consumerGroupConf) {
+ this.consumerGroupConf = consumerGroupConf;
+ }
+
public EventMeshHTTPServer getEventMeshHTTPServer() {
return eventMeshHTTPServer;
} | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/EventMeshConsumer.java', 'eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/consumer/ConsumerGroupManager.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 3,177,138 | 625,166 | 83,690 | 609 | 212 | 42 | 5 | 2 | 625 | 96 | 137 | 28 | 1 | 0 | 2022-10-17T11:22:33 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
10,006 | apache/eventmesh/1348/1347 | apache | eventmesh | https://github.com/apache/eventmesh/issues/1347 | https://github.com/apache/eventmesh/pull/1348 | https://github.com/apache/eventmesh/pull/1348 | 1 | fixed | [Bug] Pravega connector writer doesn't close when unsubscribing | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
master
### What happened
Pravega connector writer doesn't close when unsubscribing.
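For illustration, a plain-Java sketch (hypothetical names, not the connector's actual classes) of the clean-up an unsubscribe path needs: the per-topic writer must be removed from the registry *and* closed, otherwise it leaks.
```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class WriterRegistry {
    private final Map<String, AutoCloseable> writerMap = new ConcurrentHashMap<>();

    void register(String topic, AutoCloseable writer) {
        writerMap.put(topic, writer);
    }

    void unsubscribe(String topic) throws Exception {
        AutoCloseable writer = writerMap.remove(topic);
        if (writer != null) {
            writer.close(); // removing alone would leave the writer open
        }
    }
}
```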
### How to reproduce
Reading the Pravega connector code reveals it.
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | 12ed1ac376bd916e9e5de39ea6f7914e81c0ad70 | 49afbf237800705fb42daaff337a8724b1c1e203 | https://github.com/apache/eventmesh/compare/12ed1ac376bd916e9e5de39ea6f7914e81c0ad70...49afbf237800705fb42daaff337a8724b1c1e203 | diff --git a/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaClient.java b/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaClient.java
index 2b7721a60..25ebb53eb 100644
--- a/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaClient.java
+++ b/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaClient.java
@@ -19,7 +19,6 @@ package org.apache.eventmesh.connector.pravega.client;
import org.apache.eventmesh.api.EventListener;
import org.apache.eventmesh.api.SendResult;
-import org.apache.eventmesh.connector.pravega.SubscribeTask;
import org.apache.eventmesh.connector.pravega.config.PravegaConnectorConfig;
import org.apache.eventmesh.connector.pravega.exception.PravegaConnectorException;
@@ -84,7 +83,7 @@ public class PravegaClient {
}
public void start() {
- if (PravegaClient.getInstance().createScope()) {
+ if (createScope()) {
log.info("Create Pravega scope[{}] success.", PravegaConnectorConfig.getInstance().getScope());
} else {
log.info("Pravega scope[{}] has already been created.", PravegaConnectorConfig.getInstance().getScope());
@@ -101,6 +100,15 @@ public class PravegaClient {
streamManager.close();
}
+ /**
+ * Publish CloudEvent to Pravega stream named topic. Note that the messageId in SendResult is always -1
+ * since {@link EventStreamWriter#writeEvent(Object)} just return {@link java.util.concurrent.CompletableFuture}
+ * with {@link Void} which couldn't get messageId.
+ *
+ * @param topic topic
+ * @param cloudEvent cloudEvent
+ * @return SendResult whose messageId is always -1
+ */
public SendResult publish(String topic, CloudEvent cloudEvent) {
if (!createStream(topic)) {
log.debug("stream[{}] has already been created.", topic);
@@ -147,6 +155,7 @@ public class PravegaClient {
}
deleteReaderGroup(buildReaderGroup(topic, consumerGroup));
subscribeTaskMap.remove(topic).stopRead();
+ writerMap.remove(topic).close();
return true;
}
diff --git a/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaEvent.java b/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaEvent.java
index ec8ac1685..4ce37cc60 100644
--- a/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaEvent.java
+++ b/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaEvent.java
@@ -36,6 +36,8 @@ import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
public class PravegaEvent implements Serializable {
+ private static final long serialVersionUID = 0L;
+
private SpecVersion version;
private String topic;
private String data;
diff --git a/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/SubscribeTask.java b/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/SubscribeTask.java
similarity index 95%
rename from eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/SubscribeTask.java
rename to eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/SubscribeTask.java
index 17a73de0c..a38e1159f 100644
--- a/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/SubscribeTask.java
+++ b/eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/SubscribeTask.java
@@ -15,12 +15,11 @@
* limitations under the License.
*/
-package org.apache.eventmesh.connector.pravega;
+package org.apache.eventmesh.connector.pravega.client;
import org.apache.eventmesh.api.EventListener;
import org.apache.eventmesh.api.EventMeshAction;
import org.apache.eventmesh.api.EventMeshAsyncConsumeContext;
-import org.apache.eventmesh.connector.pravega.client.PravegaEvent;
import java.util.concurrent.atomic.AtomicBoolean;
| ['eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaEvent.java', 'eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/SubscribeTask.java', 'eventmesh-connector-plugin/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/client/PravegaClient.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 3,103,109 | 612,544 | 81,870 | 591 | 680 | 149 | 15 | 3 | 456 | 67 | 110 | 28 | 1 | 0 | 2022-09-20T10:15:49 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
10,005 | apache/eventmesh/1414/1058 | apache | eventmesh | https://github.com/apache/eventmesh/issues/1058 | https://github.com/apache/eventmesh/pull/1414 | https://github.com/apache/eventmesh/pull/1414 | 1 | fixes | [Bug] Fix json parse exception in RemoteUnsubscribeEventProcessor | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/eventmesh/issues?q=is%3Aissue) and found no similar issues.
### Environment
Windows
### EventMesh version
1.5.0
### What happened
call `/eventmesh/unsubscribe/remote`
### How to reproduce
call `/eventmesh/unsubscribe/remote` with param:
```
{
"url":"http://127.0.0.1:11111",
"consumerGroup":"consumer-group-1111-AC0",
"topic":["bridge-test"],
"remoteMesh":"http://127.0.0.1:10105/eventmesh/unsubscribe/local"
}
```
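One common way to hit a parse failure like this is to turn the already-bound `topic` value (a `List`) back into text with `toString()` instead of re-serializing it. A minimal Jackson sketch of the difference, using a plain `ObjectMapper` as a stand-in for the project's `JsonUtils`:
```java
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.List;
import java.util.Map;

public class TopicRoundTrip {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Map<String, Object> body = mapper.readValue(
                "{\"topic\":[\"bridge-test\"]}",
                new TypeReference<Map<String, Object>>() {});

        Object topic = body.get("topic");

        // toString() gives "[bridge-test]" (no quotes), which is not valid JSON.
        String broken = topic.toString();

        // Re-serializing gives "[\"bridge-test\"]", which parses cleanly.
        String ok = mapper.writeValueAsString(topic);
        List<String> topics = mapper.readValue(ok, new TypeReference<List<String>>() {});

        System.out.println(broken + " -> invalid, " + ok + " -> " + topics);
    }
}
```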
### Debug logs
_No response_
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR! | b560c2a3a36307bb8ee2a4a0d3f90621f825d50f | 7fb780a465965ce0df213d4d7b25978b182ffe76 | https://github.com/apache/eventmesh/compare/b560c2a3a36307bb8ee2a4a0d3f90621f825d50f...7fb780a465965ce0df213d4d7b25978b182ffe76 | diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java
index d54bad565..b3ec56102 100644
--- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java
+++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java
@@ -44,10 +44,10 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
-import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.stream.Collectors;
import org.slf4j.Logger;
@@ -113,14 +113,12 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
return;
}
-
//validate body
byte[] requestBody = requestWrapper.getBody();
Map<String, Object> requestBodyMap = JsonUtils.deserialize(new String(requestBody, Constants.DEFAULT_CHARSET),
new TypeReference<HashMap<String, Object>>() {});
-
if (requestBodyMap.get("url") == null || requestBodyMap.get("topic") == null || requestBodyMap.get("consumerGroup") == null) {
handlerSpecific.sendErrorResponse(EventMeshRetCode.EVENTMESH_PROTOCOL_BODY_ERR, responseHeaderMap,
responseBodyMap, null);
@@ -134,7 +132,6 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
long startTime = System.currentTimeMillis();
try {
// request to remote
-
String env = eventMeshHTTPServer.getEventMeshHttpConfiguration().eventMeshEnv;
String idc = eventMeshHTTPServer.getEventMeshHttpConfiguration().eventMeshIDC;
String cluster = eventMeshHTTPServer.getEventMeshHttpConfiguration().eventMeshCluster;
@@ -162,10 +159,13 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
remoteBodyMap.put("consumerGroup", meshGroup);
remoteBodyMap.put("topic", requestBodyMap.get("topic"));
- List<String> unSubTopicList = JsonUtils.deserialize(topic, new TypeReference<List<String>>() {
+ List<String> unSubTopicList = JsonUtils.deserialize(JsonUtils.serialize(requestBodyMap.get("topic")), new TypeReference<List<String>>() {
});
- String targetMesh = requestBodyMap.get("remoteMesh").toString();
+ String targetMesh = "";
+ if (!Objects.isNull(requestBodyMap.get("remoteMesh"))) {
+ targetMesh = requestBodyMap.get("remoteMesh").toString();
+ }
List<SubscriptionItem> subscriptionList = unSubTopicList.stream().map(s -> {
SubscriptionItem subscriptionItem = new SubscriptionItem();
@@ -189,13 +189,11 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
if (String.valueOf(EventMeshRetCode.SUCCESS.getRetCode()).equals(remoteResultMap.get("retCode"))) {
responseBodyMap.put("retCode", EventMeshRetCode.SUCCESS.getRetCode());
responseBodyMap.put("retMsg", EventMeshRetCode.SUCCESS.getErrMsg());
-
handlerSpecific.sendResponse(responseHeaderMap, responseBodyMap);
} else {
handlerSpecific.sendErrorResponse(EventMeshRetCode.EVENTMESH_UNSUBSCRIBE_ERR, responseHeaderMap,
responseBodyMap, null);
}
-
} catch (Exception e) {
long endTime = System.currentTimeMillis();
httpLogger.error(
@@ -205,7 +203,6 @@ public class RemoteUnSubscribeEventProcessor extends AbstractEventProcessor impl
handlerSpecific.sendErrorResponse(EventMeshRetCode.EVENTMESH_UNSUBSCRIBE_ERR, responseHeaderMap,
responseBodyMap, null);
}
-
}
@Override | ['eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/RemoteUnSubscribeEventProcessor.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 3,109,243 | 613,736 | 82,006 | 593 | 606 | 113 | 15 | 1 | 658 | 67 | 179 | 36 | 3 | 1 | 2022-09-25T13:54:31 | 1,415 | Java | {'Java': 3911514, 'Go': 272517, 'Shell': 65032, 'Rust': 49060, 'Makefile': 3761, 'Dockerfile': 1322} | Apache License 2.0 |
241 | ebean-orm/ebean/2904/2903 | ebean-orm | ebean | https://github.com/ebean-orm/ebean/issues/2903 | https://github.com/ebean-orm/ebean/pull/2904 | https://github.com/ebean-orm/ebean/pull/2904 | 1 | fixes | LimitOffsetSqlLimiter applies 'LIMIT 0' if firstRow is set only. | When `Query::setFirstRow` is used, it's expected to produce the OFFSET only. However, it produces LIMIT clause also.
## Example:
```java
someQuery.setFirstRow(5);
```
### Expected behavior
The generated query contains `OFFSET 5` and no `LIMIT` at all
### Actual behavior
The generated query contains `LIMIT 0 OFFSET 5`
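For context, a usage sketch of the behaviour described above with Ebean's query API — `Customer` is a placeholder for any mapped entity in the application, and the SQL fragments in the comments reflect the expected/actual sections above:
```java
import io.ebean.DB;

import java.util.List;

public class OffsetOnlyExample {
    public static void main(String[] args) {
        // Expected: "... offset 5" with no limit clause at all.
        List<Customer> page = DB.find(Customer.class)
                .setFirstRow(5)
                .findList();

        // With an explicit max rows: "... limit 10 offset 5".
        List<Customer> limited = DB.find(Customer.class)
                .setFirstRow(5)
                .setMaxRows(10)
                .findList();

        System.out.println(page.size() + " / " + limited.size());
    }
}
```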
I've created PR that fixes this issue: https://github.com/ebean-orm/ebean/pull/2904
| 6c6ec3663a7f450ec281304b19714e281a334772 | 3277fe81a17e91a8c8902ba331fe608992d54a10 | https://github.com/ebean-orm/ebean/compare/6c6ec3663a7f450ec281304b19714e281a334772...3277fe81a17e91a8c8902ba331fe608992d54a10 | diff --git a/ebean-api/src/main/java/io/ebean/config/dbplatform/LimitOffsetSqlLimiter.java b/ebean-api/src/main/java/io/ebean/config/dbplatform/LimitOffsetSqlLimiter.java
index 63534a27f..846ee8056 100644
--- a/ebean-api/src/main/java/io/ebean/config/dbplatform/LimitOffsetSqlLimiter.java
+++ b/ebean-api/src/main/java/io/ebean/config/dbplatform/LimitOffsetSqlLimiter.java
@@ -31,12 +31,12 @@ public class LimitOffsetSqlLimiter implements SqlLimiter {
int firstRow = request.getFirstRow();
int maxRows = request.getMaxRows();
- if (maxRows > 0 || firstRow > 0) {
+ if (maxRows > 0) {
sb.append(" ").append(LIMIT).append(" ").append(maxRows);
- if (firstRow > 0) {
- sb.append(" ").append(OFFSET).append(" ");
- sb.append(firstRow);
- }
+ }
+ if (firstRow > 0) {
+ sb.append(" ").append(OFFSET).append(" ");
+ sb.append(firstRow);
}
String sql = request.getDbPlatform().completeSql(sb.toString(), request.getOrmQuery());
diff --git a/ebean-test/src/test/java/org/tests/basic/TestLimitQuery.java b/ebean-test/src/test/java/org/tests/basic/TestLimitQuery.java
index 63183c7db..090efec8a 100644
--- a/ebean-test/src/test/java/org/tests/basic/TestLimitQuery.java
+++ b/ebean-test/src/test/java/org/tests/basic/TestLimitQuery.java
@@ -40,7 +40,7 @@ public class TestLimitQuery extends BaseTestCase {
String sql = query.getGeneratedSql();
if (isH2()) {
assertThat(sql).contains("offset 3");
- assertThat(sql).contains("limit 0");
+ assertThat(sql).doesNotContain("limit");
}
}
| ['ebean-api/src/main/java/io/ebean/config/dbplatform/LimitOffsetSqlLimiter.java', 'ebean-test/src/test/java/org/tests/basic/TestLimitQuery.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 5,347,927 | 1,253,596 | 195,162 | 1,699 | 291 | 88 | 10 | 1 | 421 | 55 | 108 | 14 | 1 | 1 | 2022-11-30T19:40:20 | 1,397 | Java | {'Java': 8708302, 'PLpgSQL': 281708, 'TSQL': 265741, 'SQLPL': 66211, 'Kotlin': 10173, 'HTML': 7848, 'ANTLR': 4477, 'Shell': 4054, 'CSS': 1446} | Apache License 2.0 |
242 | ebean-orm/ebean/1981/1980 | ebean-orm | ebean | https://github.com/ebean-orm/ebean/issues/1980 | https://github.com/ebean-orm/ebean/pull/1981 | https://github.com/ebean-orm/ebean/pull/1981 | 1 | resolves | NullPointerException in EntityBeanIntercept | ## Expected behavior
Clean execution.
## Actual behavior
It works more than 99.9% of the time; *rarely* there is a stacktrace with an NPE. Approximately one in every 1,000+ executions fails.
I don't think I've seen it more than one time for any unique test case. We have not upgraded production yet, so this is only in tests.
### Steps to reproduce
Not sure. I *suspect* a race condition, but we are completely unable to reproduce it reliably.
We do NOT have any pre-getter interceptors registered, but one IS attached (internal ebean hook).
Could it be because we do multiple saves on the same object instance?
Another possible reason it happens for us is that in tests we do
`serverConfig.backgroundExecutorSchedulePoolSize = 1`, because we want to make sure all background tasks have been finished before moving on to the next test.
The code that blows up is trivial:
```kotlin
if (bean.deleted) {
```
```console
Stacktrace
java.lang.NullPointerException
at io.ebean.bean.EntityBeanIntercept.preGetterCallback(EntityBeanIntercept.java:917)
at io.ebean.bean.EntityBeanIntercept.preGetter(EntityBeanIntercept.java:932)
at OurDBO._ebean_get_deleted(OurDBO.kt:999)
at OurDBO.getDeleted(OurDBO.kt:999)
at OurBeanPersistListener.inserted(OurBeanPersistListener.kt:999)
at io.ebeaninternal.server.core.PersistRequestBean.notifyLocalPersistListener(PersistRequestBean.java:565)
at io.ebeaninternal.server.transaction.PostCommitProcessing.localPersistListenersNotify(PostCommitProcessing.java:160)
at io.ebeaninternal.server.transaction.PostCommitProcessing.lambda$backgroundNotify$0(PostCommitProcessing.java:134)
at io.ebeaninternal.server.core.DefaultBackgroundExecutor.lambda$execute$0(DefaultBackgroundExecutor.java:43)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:834)
```
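The fix applied in the PR below uses the classic local-copy idiom for this kind of check-then-act race. A plain-Java sketch of the pattern (hypothetical names, not Ebean's actual source):
```java
class Interceptor {
    private volatile Runnable preGetterCallback;

    // Race-prone: the field is read twice and can become null in between.
    void preGetterRacy() {
        if (preGetterCallback != null) {
            preGetterCallback.run(); // may NPE if another thread cleared the field
        }
    }

    // Safe: copy the field to a local once, then check and use the copy.
    void preGetterSafe() {
        Runnable callback = this.preGetterCallback;
        if (callback != null) {
            callback.run();
        }
    }

    void setPreGetterCallback(Runnable callback) {
        this.preGetterCallback = callback;
    }
}
```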
| 2b0f572f4ddc638b126f1ea7f7ae5430d2ed59f1 | ecdd027b3e4691948d4a0e55f23fde5fde3b666d | https://github.com/ebean-orm/ebean/compare/2b0f572f4ddc638b126f1ea7f7ae5430d2ed59f1...ecdd027b3e4691948d4a0e55f23fde5fde3b666d | diff --git a/src/main/java/io/ebean/bean/EntityBeanIntercept.java b/src/main/java/io/ebean/bean/EntityBeanIntercept.java
index 1b03b47be..837d3db29 100644
--- a/src/main/java/io/ebean/bean/EntityBeanIntercept.java
+++ b/src/main/java/io/ebean/bean/EntityBeanIntercept.java
@@ -913,6 +913,7 @@ public final class EntityBeanIntercept implements Serializable {
}
private void preGetterCallback(int propertyIndex) {
+ PreGetterCallback preGetterCallback = this.preGetterCallback;
if (preGetterCallback != null) {
preGetterCallback.preGetterTrigger(propertyIndex);
} | ['src/main/java/io/ebean/bean/EntityBeanIntercept.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 4,881,197 | 1,141,011 | 181,662 | 1,394 | 66 | 13 | 1 | 1 | 2,000 | 175 | 449 | 38 | 0 | 2 | 2020-03-25T05:54:23 | 1,397 | Java | {'Java': 8708302, 'PLpgSQL': 281708, 'TSQL': 265741, 'SQLPL': 66211, 'Kotlin': 10173, 'HTML': 7848, 'ANTLR': 4477, 'Shell': 4054, 'CSS': 1446} | Apache License 2.0 |
240 | ebean-orm/ebean/1745/1743 | ebean-orm | ebean | https://github.com/ebean-orm/ebean/issues/1743 | https://github.com/ebean-orm/ebean/pull/1745 | https://github.com/ebean-orm/ebean/issues/1743#issuecomment-507031699 | 2 | solves | When using @PrimaryKeyJoinColumn, FK constraint generation cannot be disabled | I have a O2O relation that maps on ID column, so @PrimaryKeyJoinColumn() should be perfect for this
```java
public class Entity1 {
...
@OneToOne
@PrimaryKeyJoinColumn()
private Entity2 entity2 = new Entity2();
}
```
In my case, both entities may exist on their own, so I have to create & save an entity2 before entity1 exists in the database, but this is prevented by a FK constraint generated by Ebean.
I tried adding `@DbForeignKey(noConstraint = true)` or `@PrimaryKeyJoinColumn(foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT))`, but this has no effect.
## Expected behavior
When using @PrimaryKeyJoinColumn, it should be possible to disable FK constraint generation with the `@DbForeignKey` (or `@ForeignKey`) annotation.
## Actual behavior
None of the annotations is honored, a FK constraint is always generated
## Possible fix
```diff
diff --git a/src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java b/src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java
index 5c19323b4..45c4adace 100644
--- a/src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java
+++ b/src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java
@@ -1732,6 +1732,9 @@ public class BeanDescriptorManager implements BeanDescriptorMap {
private void addPrimaryKeyJoin(DeployBeanPropertyAssocOne<?> prop) {
+ if (prop.getForeignKey() != null && prop.getForeignKey().isNoConstraint()) {
+ return;
+ }
String baseTable = prop.getDesc().getBaseTable();
DeployTableJoin inverse = prop.getTableJoin().createInverse(baseTable);
```
This will at least give me control over whether the FK constraint should be generated or not (it does not add full support for `@DbForeignKey`, i.e. onDelete / onUpdate). Maybe it would be better to support only `@ForeignKey` in this case, which is not yet parsed from the @PrimaryKeyJoinColumn annotation.
@rbygrave what do you think would be the better option here? | d49a7c415f01d44a054f7927c811666ec528bb8d | 7f2e4f54ff6cb65cf20f400cc1561db26cbe5abd | https://github.com/ebean-orm/ebean/compare/d49a7c415f01d44a054f7927c811666ec528bb8d...7f2e4f54ff6cb65cf20f400cc1561db26cbe5abd | diff --git a/src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildIntersectionTable.java b/src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildIntersectionTable.java
index 9c2cf29ec..e117c3ead 100644
--- a/src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildIntersectionTable.java
+++ b/src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildIntersectionTable.java
@@ -49,8 +49,7 @@ class ModelBuildIntersectionTable {
private void buildFkConstraints() {
- PropertyForeignKey foreignKey = manyProp.getForeignKey();
- if (foreignKey == null || !foreignKey.isNoConstraint()) {
+ if (manyProp.hasForeignKeyConstraint()) {
ctx.fkeyBuilder(intersectionTable)
.addForeignKey(manyProp.getBeanDescriptor(), intersectionTableJoin, true)
.addForeignKey(manyProp.getTargetDescriptor(), tableJoin, false);
diff --git a/src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildPropertyVisitor.java b/src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildPropertyVisitor.java
index fa7950b45..338941122 100644
--- a/src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildPropertyVisitor.java
+++ b/src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildPropertyVisitor.java
@@ -170,8 +170,6 @@ public class ModelBuildPropertyVisitor extends BaseTablePropertyVisitor {
List<MColumn> modelColumns = new ArrayList<>(columns.length);
- PropertyForeignKey foreignKey = p.getForeignKey();
-
MCompoundForeignKey compoundKey = null;
if (columns.length > 1) {
// compound foreign key
@@ -196,7 +194,7 @@ public class ModelBuildPropertyVisitor extends BaseTablePropertyVisitor {
col.setDbMigrationInfos(p.getDbMigrationInfos());
col.setDefaultValue(p.getDbColumnDefault());
if (columns.length == 1) {
- if (p.hasForeignKey() && !importedProperty.getBeanDescriptor().suppressForeignKey()) {
+ if (p.hasForeignKeyConstraint() && !importedProperty.getBeanDescriptor().suppressForeignKey()) {
// single references column (put it on the column)
String refTable = importedProperty.getBeanDescriptor().getBaseTable();
if (refTable == null) {
@@ -208,6 +206,7 @@ public class ModelBuildPropertyVisitor extends BaseTablePropertyVisitor {
if (p.hasForeignKeyIndex()) {
col.setForeignKeyIndex(determineForeignKeyIndexName(col.getName()));
}
+ PropertyForeignKey foreignKey = p.getForeignKey();
if (foreignKey != null) {
col.setForeignKeyModes(foreignKey.getOnDelete(), foreignKey.getOnUpdate());
}
@@ -256,9 +255,15 @@ public class ModelBuildPropertyVisitor extends BaseTablePropertyVisitor {
}
TableJoin primaryKeyJoin = p.getBeanDescriptor().getPrimaryKeyJoin();
if (primaryKeyJoin != null && !table.isPartitioned()) {
- TableJoinColumn[] columns = primaryKeyJoin.columns();
- col.setReferences(primaryKeyJoin.getTable() + "." + columns[0].getForeignDbColumn());
- col.setForeignKeyName(determineForeignKeyConstraintName(col.getName()));
+ final PropertyForeignKey foreignKey = primaryKeyJoin.getForeignKey();
+ if (foreignKey == null || !foreignKey.isNoConstraint()) {
+ TableJoinColumn[] columns = primaryKeyJoin.columns();
+ col.setReferences(primaryKeyJoin.getTable() + "." + columns[0].getForeignDbColumn());
+ col.setForeignKeyName(determineForeignKeyConstraintName(col.getName()));
+ if (foreignKey != null) {
+ col.setForeignKeyModes(foreignKey.getOnDelete(), foreignKey.getOnUpdate());
+ }
+ }
}
} else {
col.setDefaultValue(p.getDbColumnDefault());
diff --git a/src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java b/src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java
index 6c2ab9114..cca2e4e7e 100644
--- a/src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java
+++ b/src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java
@@ -1654,8 +1654,7 @@ public class BeanDescriptorManager implements BeanDescriptorMap {
String baseTable = prop.getDesc().getBaseTable();
DeployTableJoin inverse = prop.getTableJoin().createInverse(baseTable);
- TableJoin inverseJoin = new TableJoin(inverse);
-
+ TableJoin inverseJoin = new TableJoin(inverse, prop.getForeignKey());
DeployBeanInfo<?> target = deployInfoMap.get(prop.getTargetType());
target.setPrimaryKeyJoin(inverseJoin);
}
diff --git a/src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssoc.java b/src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssoc.java
index b8915f710..a03d1e713 100644
--- a/src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssoc.java
+++ b/src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssoc.java
@@ -155,6 +155,20 @@ public abstract class BeanPropertyAssoc<T> extends BeanProperty implements STree
return foreignKey;
}
+ /**
+ * Return true if foreign key constraint is enabled on this relationship (not disabled).
+ */
+ public boolean hasForeignKeyConstraint() {
+ return foreignKey == null || !foreignKey.isNoConstraint();
+ }
+
+ /**
+ * Return true if foreign key index is enabled on this relationship (not disabled).
+ */
+ public boolean hasForeignKeyIndex() {
+ return foreignKey == null || !foreignKey.isNoIndex();
+ }
+
/**
* Create a ElPropertyValue for a *ToOne or *ToMany.
*/
@@ -228,7 +242,7 @@ public abstract class BeanPropertyAssoc<T> extends BeanProperty implements STree
/**
* Create a new query for the target type.
- *
+ * <p>
* We use target descriptor rather than target property type to support ElementCollection.
*/
public SpiQuery<T> newQuery(SpiEbeanServer server) {
diff --git a/src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssocOne.java b/src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssocOne.java
index c2cc6c843..5da1a43df 100644
--- a/src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssocOne.java
+++ b/src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssocOne.java
@@ -45,6 +45,7 @@ public class BeanPropertyAssocOne<T> extends BeanPropertyAssoc<T> implements STr
private final boolean orphanRemoval;
private final boolean primaryKeyExport;
+ private final boolean primaryKeyJoin;
private AssocOneHelp localHelp;
@@ -75,6 +76,7 @@ public class BeanPropertyAssocOne<T> extends BeanPropertyAssoc<T> implements STr
super(descriptor, deploy);
primaryKeyExport = deploy.isPrimaryKeyExport();
+ primaryKeyJoin = deploy.isPrimaryKeyJoin();
oneToOne = deploy.isOneToOne();
oneToOneExported = deploy.isOneToOneExported();
orphanRemoval = deploy.isOrphanRemoval();
@@ -100,6 +102,7 @@ public class BeanPropertyAssocOne<T> extends BeanPropertyAssoc<T> implements STr
public BeanPropertyAssocOne(BeanPropertyAssocOne source, BeanPropertyOverride override) {
super(source, override);
primaryKeyExport = source.primaryKeyExport;
+ primaryKeyJoin = source.primaryKeyJoin;
oneToOne = source.oneToOne;
oneToOneExported = source.oneToOneExported;
orphanRemoval = source.orphanRemoval;
@@ -357,11 +360,7 @@ public class BeanPropertyAssocOne<T> extends BeanPropertyAssoc<T> implements STr
}
public boolean hasForeignKey() {
- return foreignKey == null || !foreignKey.isNoConstraint();
- }
-
- public boolean hasForeignKeyIndex() {
- return foreignKey == null || !foreignKey.isNoIndex();
+ return foreignKey == null || primaryKeyJoin || !foreignKey.isNoConstraint();
}
/**
diff --git a/src/main/java/io/ebeaninternal/server/deploy/TableJoin.java b/src/main/java/io/ebeaninternal/server/deploy/TableJoin.java
index 03cb35d6c..5d805aa20 100644
--- a/src/main/java/io/ebeaninternal/server/deploy/TableJoin.java
+++ b/src/main/java/io/ebeaninternal/server/deploy/TableJoin.java
@@ -33,11 +33,17 @@ public final class TableJoin {
*/
private final int queryHash;
+ private final PropertyForeignKey foreignKey;
+
+ public TableJoin(DeployTableJoin deploy) {
+ this(deploy, null);
+ }
+
/**
* Create a TableJoin.
*/
- public TableJoin(DeployTableJoin deploy) {
-
+ public TableJoin(DeployTableJoin deploy, PropertyForeignKey foreignKey) {
+ this.foreignKey = foreignKey;
this.table = InternString.intern(deploy.getTable());
this.type = deploy.getType();
this.inheritInfo = deploy.getInheritInfo();
@@ -52,6 +58,7 @@ public final class TableJoin {
}
private TableJoin(TableJoin source, String overrideColumn) {
+ this.foreignKey = null;
this.table = source.table;
this.type = source.type;
this.inheritInfo = source.inheritInfo;
@@ -96,14 +103,6 @@ public final class TableJoin {
return true;
}
-
- /**
- * Return a hash value for adding to a query plan.
- */
- public int queryHash() {
- return queryHash;
- }
-
@Override
public String toString() {
StringBuilder sb = new StringBuilder(30);
@@ -114,6 +113,13 @@ public final class TableJoin {
return sb.toString();
}
+ /**
+ * Return the foreign key options.
+ */
+ public PropertyForeignKey getForeignKey() {
+ return foreignKey;
+ }
+
/**
* Return the join columns.
*/
diff --git a/src/test/java/org/tests/model/onetoone/OtoUPrime.java b/src/test/java/org/tests/model/onetoone/OtoUPrime.java
index 4b2bd894b..a8da23578 100644
--- a/src/test/java/org/tests/model/onetoone/OtoUPrime.java
+++ b/src/test/java/org/tests/model/onetoone/OtoUPrime.java
@@ -1,5 +1,7 @@
package org.tests.model.onetoone;
+import io.ebean.annotation.DbForeignKey;
+
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.OneToOne;
@@ -21,6 +23,7 @@ public class OtoUPrime {
*/
@OneToOne
@PrimaryKeyJoinColumn
+ @DbForeignKey(noConstraint = true)
OtoUPrimeExtra extra;
@Version
@@ -32,7 +35,7 @@ public class OtoUPrime {
@Override
public String toString() {
- return "id:"+ pid +" name:"+name+" extra:"+extra;
+ return "id:" + pid + " name:" + name + " extra:" + extra;
}
public UUID getPid() { | ['src/test/java/org/tests/model/onetoone/OtoUPrime.java', 'src/main/java/io/ebeaninternal/server/deploy/TableJoin.java', 'src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssocOne.java', 'src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildPropertyVisitor.java', 'src/main/java/io/ebeaninternal/server/deploy/BeanPropertyAssoc.java', 'src/main/java/io/ebeaninternal/dbmigration/model/build/ModelBuildIntersectionTable.java', 'src/main/java/io/ebeaninternal/server/deploy/BeanDescriptorManager.java'] | {'.java': 7} | 7 | 7 | 0 | 0 | 7 | 4,788,774 | 1,118,178 | 178,448 | 1,349 | 2,811 | 583 | 74 | 6 | 2,007 | 231 | 464 | 37 | 0 | 2 | 2019-06-30T12:08:02 | 1,397 | Java | {'Java': 8708302, 'PLpgSQL': 281708, 'TSQL': 265741, 'SQLPL': 66211, 'Kotlin': 10173, 'HTML': 7848, 'ANTLR': 4477, 'Shell': 4054, 'CSS': 1446} | Apache License 2.0 |
995 | knowm/xchart/396/389 | knowm | xchart | https://github.com/knowm/XChart/issues/389 | https://github.com/knowm/XChart/pull/396 | https://github.com/knowm/XChart/pull/396 | 1 | resolves | Zoom function improvement: Don't redraw when no points are selected. | When selection an area on the chart between data points the chart should not be redrawn. | 933494a62bc07d31115632ae4da8c6203cb865d0 | 756e2f0c3e1b89f9fc8bd555dc4accba76106813 | https://github.com/knowm/xchart/compare/933494a62bc07d31115632ae4da8c6203cb865d0...756e2f0c3e1b89f9fc8bd555dc4accba76106813 | diff --git a/xchart/src/main/java/org/knowm/xchart/XYChart.java b/xchart/src/main/java/org/knowm/xchart/XYChart.java
index 30951e3a..64fd98d3 100644
--- a/xchart/src/main/java/org/knowm/xchart/XYChart.java
+++ b/xchart/src/main/java/org/knowm/xchart/XYChart.java
@@ -465,9 +465,15 @@ public class XYChart extends Chart<XYStyler, XYSeries> {
double minValue = getChartXFromCoordinate(screenXmin);
double maxValue = getChartXFromCoordinate(screenXmax);
boolean filtered = false;
- for (XYSeries series : getSeriesMap().values()) {
- boolean f = series.filterXByValue(minValue, maxValue);
- if (f) {
+ if (isOnePointSeleted(minValue, maxValue)) {
+ for (XYSeries series : getSeriesMap().values()) {
+ boolean f = series.filterXByValue(minValue, maxValue);
+ if (f) {
+ filtered = true;
+ }
+ }
+ } else {
+ if (!isAllPointsSelected()) {
filtered = true;
}
}
@@ -480,4 +486,44 @@ public class XYChart extends Chart<XYStyler, XYSeries> {
series.filterXByIndex(startIndex, endIndex);
}
}
+
+ /**
+ * Is there a point selected in all series.
+ *
+ * @param minValue
+ * @param maxValue
+ * @return
+ */
+ private boolean isOnePointSeleted(double minValue, double maxValue) {
+
+ boolean isOnePointSeleted = false;
+ double[] xData = null;
+ for (XYSeries series : getSeriesMap().values()) {
+ xData = series.getXData();
+ for (double x : xData) {
+ if (x >= minValue && x <= maxValue) {
+ isOnePointSeleted = true;
+ break;
+ }
+ }
+ }
+ return isOnePointSeleted;
+ }
+
+ /**
+ * Whether all points are selected in all series.
+ *
+ * @return
+ */
+ private boolean isAllPointsSelected() {
+
+ boolean isAllPointsSelected = true;
+ for (XYSeries series : getSeriesMap().values()) {
+ if (!series.isAllXData()) {
+ isAllPointsSelected = false;
+ break;
+ }
+ }
+ return isAllPointsSelected;
+ }
}
diff --git a/xchart/src/main/java/org/knowm/xchart/internal/series/AxesChartSeriesNumericalNoErrorBars.java b/xchart/src/main/java/org/knowm/xchart/internal/series/AxesChartSeriesNumericalNoErrorBars.java
index f39fe801..97e64b3d 100644
--- a/xchart/src/main/java/org/knowm/xchart/internal/series/AxesChartSeriesNumericalNoErrorBars.java
+++ b/xchart/src/main/java/org/knowm/xchart/internal/series/AxesChartSeriesNumericalNoErrorBars.java
@@ -233,6 +233,16 @@ public abstract class AxesChartSeriesNumericalNoErrorBars extends MarkerSeries {
return new double[] {min, max};
}
+ /**
+ * Is xData.length equal to xDataAll.length
+ *
+ * @return true: equal; false: not equal
+ */
+ public boolean isAllXData() {
+
+ return xData.length == xDataAll.length;
+ }
+
public double[] getXData() {
return xData; | ['xchart/src/main/java/org/knowm/xchart/XYChart.java', 'xchart/src/main/java/org/knowm/xchart/internal/series/AxesChartSeriesNumericalNoErrorBars.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 913,333 | 229,343 | 32,423 | 272 | 1,546 | 392 | 62 | 2 | 88 | 16 | 18 | 1 | 0 | 0 | 2020-01-08T06:39:45 | 1,384 | Java | {'Java': 1050156} | Apache License 2.0 |
993 | knowm/xchart/717/601 | knowm | xchart | https://github.com/knowm/XChart/issues/601 | https://github.com/knowm/XChart/pull/717 | https://github.com/knowm/XChart/pull/717 | 1 | fixes | IndexOutOfBoundsException introduced in 3.7.0 or 3.8.0 | Hi, attached date+value series can be plotted in 3.6.6 without any errors. Yet in 3.8.0 (cannot use 3.7.0 as this release doesn't contain some of the class that 3.6.6 and 3.8.0 have) I got:
`java.lang.IndexOutOfBoundsException: Index 46 out of bounds for length 46
at java.base/jdk.internal.util.Preconditions.outOfBounds(Preconditions.java:64)
at java.base/jdk.internal.util.Preconditions.outOfBoundsCheckIndex(Preconditions.java:70)
at java.base/jdk.internal.util.Preconditions.checkIndex(Preconditions.java:248)
at java.base/java.util.Objects.checkIndex(Objects.java:372)
at java.base/java.util.ArrayList.get(ArrayList.java:458)
at org.knowm.xchart.internal.chartpart.AxisTickCalculator_Date.calculate(AxisTickCalculator_Date.java:201)
at org.knowm.xchart.internal.chartpart.AxisTickCalculator_Date.<init>(AxisTickCalculator_Date.java:103)
at org.knowm.xchart.internal.chartpart.Axis.getAxisTickCalculator(Axis.java:454)
at org.knowm.xchart.internal.chartpart.Axis.getXAxisHeightHint(Axis.java:289)
at org.knowm.xchart.internal.chartpart.Axis.preparePaint(Axis.java:155)
at org.knowm.xchart.internal.chartpart.AxisPair.paint(AxisPair.java:121)
at org.knowm.xchart.XYChart.paint(XYChart.java:416)
at org.knowm.xchart.BitmapEncoder.getBufferedImage(BitmapEncoder.java:281)
at org.knowm.xchart.BitmapEncoder.saveBitmap(BitmapEncoder.java:134)
at org.knowm.xchart.BitmapEncoder.saveBitmap(BitmapEncoder.java:109)
`
which corresponds to unverified use of index in a loop here:
`double gridStep =
timeSpans.get(++index).getUnitAmount()
* timeSpans.get(index).getMagnitude(); // in time units (ms)
*
`
This error most probably has been introduced as part of this fix: https://github.com/knowm/XChart/issues/527 (@timmolter)
example data that leads to this error:
[in.csv](https://github.com/knowm/XChart/files/6569374/in.csv)
| a93e6e81db19361ea18492d3a6697979f2c5631b | 5794e4656d7d8004465429e3e3acc6a44837157f | https://github.com/knowm/xchart/compare/a93e6e81db19361ea18492d3a6697979f2c5631b...5794e4656d7d8004465429e3e3acc6a44837157f | diff --git a/xchart/src/main/java/org/knowm/xchart/internal/chartpart/AxisTickCalculator_Date.java b/xchart/src/main/java/org/knowm/xchart/internal/chartpart/AxisTickCalculator_Date.java
index 226c2350..f19ead80 100644
--- a/xchart/src/main/java/org/knowm/xchart/internal/chartpart/AxisTickCalculator_Date.java
+++ b/xchart/src/main/java/org/knowm/xchart/internal/chartpart/AxisTickCalculator_Date.java
@@ -242,6 +242,9 @@ class AxisTickCalculator_Date extends AxisTickCalculator_ {
// }
}
// System.out.println("************");
+ if(index >= timeSpans.size() - 1)
+ break; // We reached the end, even though we might not yet have the result we want,
+ // continuing will lead to an Exception!
} while (skip
|| !areAllTickLabelsUnique(tickLabels)
|| !willLabelsFitInTickSpaceHint(tickLabels, gridStepInChartSpace)); | ['xchart/src/main/java/org/knowm/xchart/internal/chartpart/AxisTickCalculator_Date.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,020,593 | 258,657 | 36,409 | 303 | 190 | 43 | 3 | 1 | 1,916 | 120 | 481 | 35 | 2 | 0 | 2023-01-01T01:11:20 | 1,384 | Java | {'Java': 1050156} | Apache License 2.0 |
994 | knowm/xchart/495/494 | knowm | xchart | https://github.com/knowm/XChart/issues/494 | https://github.com/knowm/XChart/pull/495 | https://github.com/knowm/XChart/pull/495 | 1 | resolves | Impossible to save jpg images | After resolving issue 488 adding alpha channels, it is no longer possible to save jpg files. This (implementation of this) format doesn't support alpha channels. | bc1df5dd6e05a1bc5bbc872e5b4306ee89bd2523 | 196ef5a62bfb599a768be936973267b8ef445a6b | https://github.com/knowm/xchart/compare/bc1df5dd6e05a1bc5bbc872e5b4306ee89bd2523...196ef5a62bfb599a768be936973267b8ef445a6b | diff --git a/xchart-demo/src/main/java/org/knowm/xchart/standalone/issues/TestForIssue488.java b/xchart-demo/src/main/java/org/knowm/xchart/standalone/issues/TestForIssue488.java
index 144ac4a2..9c5d160d 100644
--- a/xchart-demo/src/main/java/org/knowm/xchart/standalone/issues/TestForIssue488.java
+++ b/xchart-demo/src/main/java/org/knowm/xchart/standalone/issues/TestForIssue488.java
@@ -28,6 +28,12 @@ public class TestForIssue488 {
chart.getStyler().setSeriesColors(sliceColors);
chart.getStyler().setChartTitleBoxVisible(false);
chart.getStyler().setHasAnnotations(false);
+ chart.getStyler().setChartBackgroundColor(new Color(255, 255, 255));
+ // chart.getStyler().setChartBackgroundColor(new Color(0, 255, 0, 25));
+ // chart.getStyler().setChartBackgroundColor(new Color(0, 255, 0));
+ chart.getStyler().setPlotBackgroundColor(new Color(255, 255, 255));
+// chart.getStyler().setPlotBackgroundColor(new Color(0, 0, 255, 20));
+// chart.getStyler().setPlotBackgroundColor(new Color(0, 0, 255));
// chart.getStyler().setChartBackgroundColor(new Color(255, 255, 255));
// chart.getStyler().setChartBackgroundColor(new Color(0, 255, 0, 25));
// chart.getStyler().setChartBackgroundColor(new Color(0, 255, 0));
diff --git a/xchart/src/main/java/org/knowm/xchart/BitmapEncoder.java b/xchart/src/main/java/org/knowm/xchart/BitmapEncoder.java
index d6670b59..2e9cf41c 100644
--- a/xchart/src/main/java/org/knowm/xchart/BitmapEncoder.java
+++ b/xchart/src/main/java/org/knowm/xchart/BitmapEncoder.java
@@ -275,7 +275,7 @@ public final class BitmapEncoder {
public static BufferedImage getBufferedImage(Chart chart) {
BufferedImage bufferedImage =
- new BufferedImage(chart.getWidth(), chart.getHeight(), BufferedImage.TYPE_INT_ARGB);
+ new BufferedImage(chart.getWidth(), chart.getHeight(), BufferedImage.TYPE_INT_RGB);
Graphics2D graphics2D = bufferedImage.createGraphics();
chart.paint(graphics2D, chart.getWidth(), chart.getHeight());
return bufferedImage; | ['xchart/src/main/java/org/knowm/xchart/BitmapEncoder.java', 'xchart-demo/src/main/java/org/knowm/xchart/standalone/issues/TestForIssue488.java'] | {'.java': 2} | 2 | 2 | 0 | 0 | 2 | 1,022,804 | 258,076 | 36,666 | 302 | 642 | 167 | 8 | 2 | 161 | 25 | 32 | 1 | 0 | 0 | 2020-08-10T07:56:30 | 1,384 | Java | {'Java': 1050156} | Apache License 2.0 |
154 | mit-cml/appinventor-sources/2010/2009 | mit-cml | appinventor-sources | https://github.com/mit-cml/appinventor-sources/issues/2009 | https://github.com/mit-cml/appinventor-sources/pull/2010 | https://github.com/mit-cml/appinventor-sources/pull/2010 | 2 | fixes | Build error when compiling official Look and PersonalImageClassifier extensions | **Describe the bug**
Compilation of official Look and PersonalImageClassifier extensions consistently fails.
**Affects**
Look and PersonalImageClassifier extensions cannot be compiled for use with the latest AppInventor source code.
- [ ] Designer
- [ ] Blocks editor
- [ ] Companion
- [ ] Compiled apps
- [ ] Buildserver
- [x] Other... (please describe)
**Expected behavior**
Look and PersonalImageClassifier extensions can be successfully built, and corresponding aix files are located at appinventor-sources/appinventor/components/build/extensions
**Steps to reproduce**
step 1: check out App Inventor source code from its main github branch (the latest code)
step 2: check out Look and PersonalImageClassifier extensions source code from official github
step 3: copy Look and PersonalImageClassifier source code to appinventor-sources/appinventor/components/src/edu/mit/appinventor/ai/look and appinventor-sources/appinventor/components/src/edu/mit/appinventor/ai/personalimageclassifier respectively
step 4: cd appinventor-sources/appinventor/components/
step 5: execute "ant extensions"
step 6: build error as follows -
ExternalComponentGenerator:
[mkdir] Created dir: /home/ubuntu/appinventor-sources/appinventor/components/build/classes/ExternalComponentGenerator
[mkdir] Created dir: /home/ubuntu/appinventor-sources/appinventor/components/build/externalComponents
[mkdir] Created dir: /home/ubuntu/appinventor-sources/appinventor/components/build/externalComponents-class
[javac] Compiling 1 source file to /home/ubuntu/appinventor-sources/appinventor/components/build/classes/ExternalComponentGenerator
[javac] warning: [options] bootstrap class path not set in conjunction with -source 1.7
[javac] 1 warning
[java]
[java] Extensions : Generating extensions
[java]
[java] Extensions : Generating files [edu.mit.appinventor.ai.personalimageclassifier]
[java] Extensions : Copying file /home/ubuntu/appinventor-sources/appinventor/components/build/externalComponents/../../src/edu/mit/appinventor/ai/personalimageclassifier/aiwebres/glasses.png
[java] java.lang.StringIndexOutOfBoundsException: String index out of range: -1
[java] at org.apache.tools.ant.taskdefs.ExecuteJava.execute(ExecuteJava.java:194)
[java] at org.apache.tools.ant.taskdefs.Java.run(Java.java:861)
[java] at org.apache.tools.ant.taskdefs.Java.executeJava(Java.java:231)
[java] at org.apache.tools.ant.taskdefs.Java.executeJava(Java.java:135)
[java] at org.apache.tools.ant.taskdefs.Java.execute(Java.java:108)
[java] at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:292)
[java] at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
[java] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[java] at java.lang.reflect.Method.invoke(Method.java:498)
[java] at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:99)
[java] at org.apache.tools.ant.Task.perform(Task.java:350)
[java] at org.apache.tools.ant.Target.execute(Target.java:449)
[java] at org.apache.tools.ant.Target.performTasks(Target.java:470)
[java] at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1388)
[java] at org.apache.tools.ant.helper.SingleCheckExecutor.executeTargets(SingleCheckExecutor.java:36)
[java] at org.apache.tools.ant.Project.executeTargets(Project.java:1251)
[java] at org.apache.tools.ant.taskdefs.Ant.execute(Ant.java:437)
[java] at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:292)
[java] at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
[java] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[java] at java.lang.reflect.Method.invoke(Method.java:498)
[java] at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:99)
[java] at org.apache.tools.ant.Task.perform(Task.java:350)
[java] at org.apache.tools.ant.Target.execute(Target.java:449)
[java] at org.apache.tools.ant.Target.performTasks(Target.java:470)
[java] at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1388)
[java] at org.apache.tools.ant.Project.executeTarget(Project.java:1361)
[java] at org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
[java] at org.apache.tools.ant.Project.executeTargets(Project.java:1251)
[java] at org.apache.tools.ant.Main.runBuild(Main.java:834)
[java] at org.apache.tools.ant.Main.startAnt(Main.java:223)
[java] at org.apache.tools.ant.launch.Launcher.run(Launcher.java:284)
[java] at org.apache.tools.ant.launch.Launcher.main(Launcher.java:101)
[java] Caused by: java.lang.StringIndexOutOfBoundsException: String index out of range: -1
[java] at java.lang.String.substring(String.java:1967)
[java] at com.google.appinventor.components.scripts.ExternalComponentGenerator.getClassPackage(ExternalComponentGenerator.java:388)
[java] at com.google.appinventor.components.scripts.ExternalComponentGenerator.isRelatedExternalClass(ExternalComponentGenerator.java:376)
[java] at com.google.appinventor.components.scripts.ExternalComponentGenerator.copyRelatedExternalClasses(ExternalComponentGenerator.java:348)
[java] at com.google.appinventor.components.scripts.ExternalComponentGenerator.generateExternalComponentBuildFiles(ExternalComponentGenerator.java:163)
[java] at com.google.appinventor.components.scripts.ExternalComponentGenerator.generateAllExtensions(ExternalComponentGenerator.java:113)
[java] at com.google.appinventor.components.scripts.ExternalComponentGenerator.main(ExternalComponentGenerator.java:71)
[java] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[java] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[java] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[java] at java.lang.reflect.Method.invoke(Method.java:498)
[java] at org.apache.tools.ant.taskdefs.ExecuteJava.run(ExecuteJava.java:218)
[java] at org.apache.tools.ant.taskdefs.ExecuteJava.execute(ExecuteJava.java:155)
[java] ... 32 more
[java] Java Result: -1 | d2357d1b0111e3559e6cd0f85e336d4a19511fd4 | 7609a45a2fdd49f5f4bd29ecff1899fd01cd00e4 | https://github.com/mit-cml/appinventor-sources/compare/d2357d1b0111e3559e6cd0f85e336d4a19511fd4...7609a45a2fdd49f5f4bd29ecff1899fd01cd00e4 | diff --git a/appinventor/components/src/com/google/appinventor/components/scripts/ExternalComponentGenerator.java b/appinventor/components/src/com/google/appinventor/components/scripts/ExternalComponentGenerator.java
index 7cee62565..041f90d03 100644
--- a/appinventor/components/src/com/google/appinventor/components/scripts/ExternalComponentGenerator.java
+++ b/appinventor/components/src/com/google/appinventor/components/scripts/ExternalComponentGenerator.java
@@ -371,6 +371,9 @@ public class ExternalComponentGenerator {
* {@code extensionPackage}, {@code false} otherwise
*/
private static boolean isRelatedExternalClass(final String testClassAbsolutePath, final String extensionPackage ) {
+ if (!testClassAbsolutePath.endsWith(".class")) { // Ignore things that aren't class files...
+ return false;
+ }
String componentPackagePath = extensionPackage.replace(".", File.separator);
String testClassPath = getClassPackage(testClassAbsolutePath); | ['appinventor/components/src/com/google/appinventor/components/scripts/ExternalComponentGenerator.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,300,226 | 1,397,686 | 175,902 | 859 | 126 | 27 | 3 | 1 | 6,490 | 356 | 1,540 | 95 | 0 | 0 | 2020-01-31T17:30:34 | 1,331 | Java | {'Java': 8910424, 'HTML': 1461396, 'Swift': 1035926, 'Objective-C': 647084, 'JavaScript': 637622, 'Scheme': 277513, 'Python': 205314, 'CSS': 170862, 'Shell': 16219, 'SCSS': 12605, 'Batchfile': 4359, 'C': 3930, 'AMPL': 3281, 'Ruby': 3174, 'Lex': 1316, 'Yacc': 1103, 'Dockerfile': 535, 'Makefile': 96} | Apache License 2.0 |
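The crash in the record above is a plain `String.substring` misuse: `lastIndexOf` returns -1 for a path that is not a `.class` file (here the copied `glasses.png` asset), and `substring(0, -1)` throws `StringIndexOutOfBoundsException`. A minimal, self-contained sketch of that failure mode and of the guard the patch adds. The helper below is hypothetical and only illustrative; it is not the project's actual `getClassPackage` implementation:

```java
public class SubstringGuardDemo {

  // Hypothetical stand-in for a helper that assumes its input ends in ".class".
  // For "aiwebres/glasses.png", lastIndexOf(".class") is -1, so substring(0, -1)
  // throws StringIndexOutOfBoundsException, matching the error in the build log.
  static String stripClassSuffix(String path) {
    return path.substring(0, path.lastIndexOf(".class"));
  }

  // Guarded variant in the spirit of the patch: skip anything that cannot be a
  // class file before doing any string surgery on the path.
  static String stripClassSuffixGuarded(String path) {
    if (!path.endsWith(".class")) {
      return null;
    }
    return path.substring(0, path.lastIndexOf(".class"));
  }

  public static void main(String[] args) {
    System.out.println(stripClassSuffixGuarded("edu/mit/Look.class"));   // edu/mit/Look
    System.out.println(stripClassSuffixGuarded("aiwebres/glasses.png")); // null
    System.out.println(stripClassSuffix("aiwebres/glasses.png"));        // throws
  }
}
```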
151 | mit-cml/appinventor-sources/2427/2423 | mit-cml | appinventor-sources | https://github.com/mit-cml/appinventor-sources/issues/2423 | https://github.com/mit-cml/appinventor-sources/pull/2427 | https://github.com/mit-cml/appinventor-sources/pull/2427#issuecomment-791674320 | 1 | fixes | Need to Re-write the POP_FIRST_SCRIPT (LUA Script) used by CloudDB | <a id="org7d01aab"></a>
# TL;DR
We broke (slightly) CloudDB when we introduced dictionaries.
Read on for the nitty gritty details.
<a id="org632c49e"></a>
# Background
We use Redis software to implement CloudDB storage. The CloudDB server
itself provides a front end that enforces confidentiality (aka
encryption in transport) as well as isolation between users (so you
can only mess with your variables).
Redis is a key value store with a simple Publication/Subscription (aka pub/sub)
event system built in. CloudDB variables are keys in the Redis store.
However, the pub/sub system isn’t sufficient for our needs in
CloudDB. In particular when a variable changes, it emits an event, but
the event itself doesn’t provide the new value. So, you have to then do
a GET to get the value. However, in apps that are making many updates
(like our Sketch and Guess Tutorial) you can lose updates in the time
between the event is triggered, and you manage to do a GET.
Fortunately, Redis implements scripting using the LUA language. The LUA
implementation permits you do to “atomic” operations within Redis. So
when CloudDB does a StoreValue, we do not simply turn that into a
Redis SET. Instead, we invoke a LUA script which does the set and emits
an event with the new value. It’s kind of cool actually.
Now we do limit which scripts can be run to avoid a security hole, but
that isn’t at issue here.
The “Append” and “Remove First” functionality is also implemented as
LUA scripts for the same reason. The low-level implementation stores
the list as a Redis “table” object. We use JSON to encode the table
when we return it to CloudDB. For some reason that I don’t understand
(yet), Redis internally JSON encodes a table with contents as a list
(good, this works for us) but an empty table is returned as an empty
JSON “object” (aka {}) instead of an empty list (aka []).
Prior to our support for dictionaries, we silently turned this empty
object {} into an empty list [] because we didn’t support
dictionaries, which are represented as JSON objects.
Since we added support for dictionaries, we no longer do this
conversion.
Welcome to the land of unintended consequences. This is a classic
example. We were not aware of this Redis “feature” and prior to our
support for dictionaries, it was completely benign for us.
<a id="orge6a36f1"></a>
# How to fix this
The correct fix is for us to update the LUA script used in the
POP_FIRST LUA Script to test for the empty object and turn it into an
empty list. However, this is a component change, which means a new
Companion and all that entails.
I’m seeing if I can do a kludge in the CloudDB server itself and
detect when a list has been emptied and explicitly set it to an empty
list instead of an empty object. My current thinking is that I can
write a new LUA script, which would eventually get put into the
CloudDB Component, but also do a hack on the CloudDB server that
recognizes the older script and replaces it with the newer one… I
sill have to think about this though…
-Jeff | 92dc8d5419087ad3a5a981e54466a043041aedb3 | 3153792bdf0929e117a94260cdb8839dcefb14b1 | https://github.com/mit-cml/appinventor-sources/compare/92dc8d5419087ad3a5a981e54466a043041aedb3...3153792bdf0929e117a94260cdb8839dcefb14b1 | diff --git a/appinventor/components/src/com/google/appinventor/components/runtime/CloudDB.java b/appinventor/components/src/com/google/appinventor/components/runtime/CloudDB.java
index 033263e88..70a224f1d 100644
--- a/appinventor/components/src/com/google/appinventor/components/runtime/CloudDB.java
+++ b/appinventor/components/src/com/google/appinventor/components/runtime/CloudDB.java
@@ -891,6 +891,9 @@ public final class CloudDB extends AndroidNonvisibleComponent implements Compone
"if (type(decodedValue) == 'table') then " +
" local removedValue = table.remove(decodedValue, 1);" +
" local newValue = cjson.encode(decodedValue);" +
+ " if (newValue == \\"{}\\") then " +
+ " newValue = \\"[]\\" " +
+ " end " +
" redis.call('set', project .. \\":\\" .. key, newValue);" +
" table.insert(subTable, key);" +
" table.insert(subTable1, newValue);" +
@@ -901,7 +904,7 @@ public final class CloudDB extends AndroidNonvisibleComponent implements Compone
" return error('You can only remove elements from a list');" +
"end";
- private static final String POP_FIRST_SCRIPT_SHA1 = "ed4cb4717d157f447848fe03524da24e461028e1";
+ private static final String POP_FIRST_SCRIPT_SHA1 = "68a7576e7dc283a8162d01e3e7c2d5c4ab3ff7a5";
/**
* Obtain the first element of a list and atomically remove it. If two devices use this function | ['appinventor/components/src/com/google/appinventor/components/runtime/CloudDB.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,545,275 | 1,452,431 | 182,250 | 880 | 292 | 104 | 5 | 1 | 3,137 | 532 | 721 | 75 | 0 | 0 | 2021-03-05T20:38:44 | 1,331 | Java | {'Java': 8910424, 'HTML': 1461396, 'Swift': 1035926, 'Objective-C': 647084, 'JavaScript': 637622, 'Scheme': 277513, 'Python': 205314, 'CSS': 170862, 'Shell': 16219, 'SCSS': 12605, 'Batchfile': 4359, 'C': 3930, 'AMPL': 3281, 'Ruby': 3174, 'Lex': 1316, 'Yacc': 1103, 'Dockerfile': 535, 'Makefile': 96} | Apache License 2.0 |
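The heart of the fix in the record above is a normalization step inside the Lua script: Redis' cjson cannot tell an empty Lua array from an empty map, so an emptied list serializes as `{}` instead of `[]`, which dictionary-aware JSON parsing then treats as an empty dictionary. A rough Java sketch of the same normalization, shown client-side purely for illustration; the helper below is hypothetical and not part of the CloudDB component's API:

```java
public class EmptyListNormalizer {

  // cjson serializes an empty Lua table as "{}", so a list that has just had
  // its last element removed comes back looking like an empty dictionary.
  // Treat that sentinel as an empty list before storing or returning it.
  static String normalizeEmptyList(String json) {
    return "{}".equals(json.trim()) ? "[]" : json;
  }

  public static void main(String[] args) {
    System.out.println(normalizeEmptyList("{}"));        // []
    System.out.println(normalizeEmptyList("[1, 2, 3]")); // [1, 2, 3] (unchanged)
  }
}
```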
153 | mit-cml/appinventor-sources/2707/2704 | mit-cml | appinventor-sources | https://github.com/mit-cml/appinventor-sources/issues/2704 | https://github.com/mit-cml/appinventor-sources/pull/2707 | https://github.com/mit-cml/appinventor-sources/pull/2707 | 1 | fixes | Update phone call to work on latest Android OS | **Describe the bug**
The make phone call method does nothing.
One possible approach might be to use the activity starter.
Is there anything we can do for iOS?
| a899dd5f575621f44f325c10bc776cbcc0770333 | 7904aff7315131bb69c01b9b83caf7cacfdf2e30 | https://github.com/mit-cml/appinventor-sources/compare/a899dd5f575621f44f325c10bc776cbcc0770333...7904aff7315131bb69c01b9b83caf7cacfdf2e30 | diff --git a/appinventor/components/src/com/google/appinventor/components/runtime/PhoneCall.java b/appinventor/components/src/com/google/appinventor/components/runtime/PhoneCall.java
index 238106d00..c82603167 100644
--- a/appinventor/components/src/com/google/appinventor/components/runtime/PhoneCall.java
+++ b/appinventor/components/src/com/google/appinventor/components/runtime/PhoneCall.java
@@ -26,6 +26,7 @@ import com.google.appinventor.components.annotations.SimpleFunction;
import com.google.appinventor.components.annotations.SimpleObject;
import com.google.appinventor.components.annotations.SimpleProperty;
import com.google.appinventor.components.annotations.UsesPermissions;
+import com.google.appinventor.components.annotations.UsesQueries;
import com.google.appinventor.components.common.ComponentCategory;
import com.google.appinventor.components.common.EndedStatus;
import com.google.appinventor.components.common.PropertyTypeConstants;
@@ -34,6 +35,10 @@ import com.google.appinventor.components.common.YaVersion;
import com.google.appinventor.components.runtime.util.BulkPermissionRequest;
import com.google.appinventor.components.runtime.util.PhoneCallUtil;
+import com.google.appinventor.components.annotations.androidmanifest.ActionElement;
+import com.google.appinventor.components.annotations.androidmanifest.ActivityElement;
+import com.google.appinventor.components.annotations.androidmanifest.IntentFilterElement;
+
/**
* 
*
@@ -80,6 +85,11 @@ import com.google.appinventor.components.runtime.util.PhoneCallUtil;
nonVisible = true,
iconName = "images/phoneCall.png")
@SimpleObject
+@UsesQueries(intents = {
+ @IntentFilterElement(
+ actionElements = {@ActionElement(name = Intent.ACTION_DIAL)}
+ )
+ })
public class PhoneCall extends AndroidNonvisibleComponent implements Component, OnDestroyListener,
ActivityResultListener {
| ['appinventor/components/src/com/google/appinventor/components/runtime/PhoneCall.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 7,187,316 | 1,600,734 | 200,416 | 928 | 467 | 87 | 10 | 1 | 158 | 26 | 32 | 8 | 0 | 0 | 2022-07-27T17:07:48 | 1,331 | Java | {'Java': 8910424, 'HTML': 1461396, 'Swift': 1035926, 'Objective-C': 647084, 'JavaScript': 637622, 'Scheme': 277513, 'Python': 205314, 'CSS': 170862, 'Shell': 16219, 'SCSS': 12605, 'Batchfile': 4359, 'C': 3930, 'AMPL': 3281, 'Ruby': 3174, 'Lex': 1316, 'Yacc': 1103, 'Dockerfile': 535, 'Makefile': 96} | Apache License 2.0 |
152 | mit-cml/appinventor-sources/2643/2642 | mit-cml | appinventor-sources | https://github.com/mit-cml/appinventor-sources/issues/2642 | https://github.com/mit-cml/appinventor-sources/pull/2643 | https://github.com/mit-cml/appinventor-sources/pull/2643#issuecomment-1047273775 | 2 | fixes | Reading images out of CloudDB is broken as of nb188 | **Describe the bug**
As the title says, reading images is broken. Attempting to do so results in a CloudDB Error.
**Affects**
<!--
Please check off the part of the system that is affected by the bug.
-->
- [ ] Designer
- [ ] Blocks editor
- [x] Companion
- [x] Compiled apps
- [ ] Buildserver
- [ ] Debugging
- [ ] Other... (please describe)
**Expected behavior**
Images should be able to be read from CloudDB
**Steps to reproduce**
Attempt to read an image out of CloudDB
| 224a5928763cc43f04a912c0b546c7f37f13084d | d6560ded38e55cb7ce9d4ffadd4290b3715c4052 | https://github.com/mit-cml/appinventor-sources/compare/224a5928763cc43f04a912c0b546c7f37f13084d...d6560ded38e55cb7ce9d4ffadd4290b3715c4052 | diff --git a/appinventor/components/src/com/google/appinventor/components/runtime/util/JsonUtil.java b/appinventor/components/src/com/google/appinventor/components/runtime/util/JsonUtil.java
index 8f2e2f4c0..51b269d6a 100644
--- a/appinventor/components/src/com/google/appinventor/components/runtime/util/JsonUtil.java
+++ b/appinventor/components/src/com/google/appinventor/components/runtime/util/JsonUtil.java
@@ -6,6 +6,7 @@
package com.google.appinventor.components.runtime.util;
import android.content.Context;
+import android.net.Uri;
import android.util.Base64;
import android.util.Log;
@@ -445,19 +446,37 @@ public class JsonUtil {
*/
private static String writeFile(Form context, final String input, String fileExtension) {
String fullDirName = context.getDefaultPath(BINFILE_DIR);
- File destDirectory = new File(fullDirName);
+ String preAmble = Uri.parse(context.getDefaultPath("")).getPath();
+
+ ///////////////////////////////////////////////////////////////////////////////
+ // What's going on here? //
+ // //
+ // fullDirName is in fact the full path name of the BINFILE_DIR AS A URI! //
+ // preAmble is the parent directory path as a plain path. FileWriteOperation //
+ // takes a path, but then uses the file scope to pre-pend the appropriate //
+ // directory. However this is already included in the variable "dest" //
+ // returned by File.createTempFile. So we use preAmble to remove it. //
+ // Obscure for sure... but there it is! //
+ ///////////////////////////////////////////////////////////////////////////////
+
+ File destDirectory = new File(Uri.parse(fullDirName).getPath());
final Synchronizer<Boolean> result = new Synchronizer<>();
File dest;
try {
dest = File.createTempFile("BinFile", "." + fileExtension, destDirectory);
new FileWriteOperation(context, context, "Write",
- dest.getAbsolutePath().replace(context.getDefaultPath(""), ""),
+ dest.getAbsolutePath().replace(preAmble, ""),
context.DefaultFileScope(), false, true) {
@Override
protected boolean process(OutputStream stream) throws IOException {
- stream.write(Base64.decode(input, Base64.DEFAULT));
- result.wakeup(true);
- return true;
+ try {
+ stream.write(Base64.decode(input, Base64.DEFAULT));
+ result.wakeup(true);
+ return true;
+ } catch (Exception e) {
+ result.caught(e);
+ return true;
+ }
}
}.run();
result.waitfor();
@@ -473,7 +492,7 @@ public class JsonUtil {
}
}
} catch (IOException e) {
- throw new YailRuntimeError(e.getMessage(), "Write");
+ throw new YailRuntimeError(e.getMessage() + " destDirectory: " + destDirectory, "Write");
}
trimDirectory(20, destDirectory);
return dest.getAbsolutePath(); | ['appinventor/components/src/com/google/appinventor/components/runtime/util/JsonUtil.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 6,671,124 | 1,480,109 | 185,527 | 913 | 1,724 | 308 | 31 | 1 | 506 | 89 | 129 | 26 | 0 | 0 | 2022-02-21T22:36:36 | 1,331 | Java | {'Java': 8910424, 'HTML': 1461396, 'Swift': 1035926, 'Objective-C': 647084, 'JavaScript': 637622, 'Scheme': 277513, 'Python': 205314, 'CSS': 170862, 'Shell': 16219, 'SCSS': 12605, 'Batchfile': 4359, 'C': 3930, 'AMPL': 3281, 'Ruby': 3174, 'Lex': 1316, 'Yacc': 1103, 'Dockerfile': 535, 'Makefile': 96} | Apache License 2.0 |
1,094 | sqlancer/sqlancer/13/12 | sqlancer | sqlancer | https://github.com/sqlancer/sqlancer/issues/12 | https://github.com/sqlancer/sqlancer/pull/13 | https://github.com/sqlancer/sqlancer/pull/13 | 1 | fixes | sqlancer/mysql/gen/MySQLInsertGenerator.java | Query generateInto()
for (int row = 0; row < nrRows; row++) {
if (row != 0) {
sb.append(", ");
}
sb.append("(");
for (int c = 0; c < columns.size(); c++) {
if (c++ != 0) { <---- typo, should be c
sb.append(", ");
}
sb.append(MySQLVisitor.asString(gen.generateConstant()));
}
sb.append(")");
}
| 6d986e909d1316b1b89f25f717e59ccb92972720 | 974fae8b55ee916e0d1b341f016639fe685fcc1e | https://github.com/sqlancer/sqlancer/compare/6d986e909d1316b1b89f25f717e59ccb92972720...974fae8b55ee916e0d1b341f016639fe685fcc1e | diff --git a/src/sqlancer/mysql/gen/MySQLInsertGenerator.java b/src/sqlancer/mysql/gen/MySQLInsertGenerator.java
index 563997da..b29c6aa4 100644
--- a/src/sqlancer/mysql/gen/MySQLInsertGenerator.java
+++ b/src/sqlancer/mysql/gen/MySQLInsertGenerator.java
@@ -81,7 +81,7 @@ public class MySQLInsertGenerator {
}
sb.append("(");
for (int c = 0; c < columns.size(); c++) {
- if (c++ != 0) {
+ if (c != 0) {
sb.append(", ");
}
sb.append(MySQLVisitor.asString(gen.generateConstant())); | ['src/sqlancer/mysql/gen/MySQLInsertGenerator.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,414,899 | 275,732 | 38,693 | 361 | 63 | 19 | 2 | 1 | 477 | 48 | 107 | 17 | 0 | 0 | 2020-06-03T19:26:47 | 1,293 | Java | {'Java': 3648769, 'Python': 3131, 'Dockerfile': 313} | MIT License |
1,093 | sqlancer/sqlancer/87/86 | sqlancer | sqlancer | https://github.com/sqlancer/sqlancer/issues/86 | https://github.com/sqlancer/sqlancer/pull/87 | https://github.com/sqlancer/sqlancer/pull/87 | 1 | fixes | Value generated by SQLancer for jit_inline_above_cost is outside the correct range | The command `SET SESSION jit_inline_above_cost=-134588496167444481;` was produced by SQLancer, which raises the error: `org.postgresql.util.PSQLException: ERROR: -1.34588e+17 is outside the valid range for parameter "jit_inline_above_cost" (-1 .. 1.79769e+308)`. | 21e15906905b0b6e112b9738a399f31631278e1c | 3600196eea66b51e5a0b36707676714dbe4c8352 | https://github.com/sqlancer/sqlancer/compare/21e15906905b0b6e112b9738a399f31631278e1c...3600196eea66b51e5a0b36707676714dbe4c8352 | diff --git a/src/sqlancer/Randomly.java b/src/sqlancer/Randomly.java
index 99a26792..1488ed43 100644
--- a/src/sqlancer/Randomly.java
+++ b/src/sqlancer/Randomly.java
@@ -410,7 +410,10 @@ public final class Randomly {
if (lower > upper) {
throw new IllegalArgumentException(lower + " " + upper);
}
- return lower + ((long) (getThreadRandom().get().nextDouble() * (upper - lower)));
+ if (lower == upper) {
+ return lower;
+ }
+ return (long) (getThreadRandom().get().longs(lower, upper).findFirst().getAsLong());
}
private static int getNextInt(int lower, int upper) {
diff --git a/src/sqlancer/mariadb/gen/MariaDBSetGenerator.java b/src/sqlancer/mariadb/gen/MariaDBSetGenerator.java
index 33fead6b..270862d7 100644
--- a/src/sqlancer/mariadb/gen/MariaDBSetGenerator.java
+++ b/src/sqlancer/mariadb/gen/MariaDBSetGenerator.java
@@ -124,7 +124,6 @@ public class MariaDBSetGenerator {
* ("batched_key_access", /*"block_nested_loop", "condition_fanout_filter",
*/
"condition_pushdown_for_derived", // MariaDB
- "condition_pushdown_for_subquery", // MariaDB
"derived_merge", //
"derived_with_keys", // MariaDB
"engine_condition_pushdown", //
diff --git a/test/sqlancer/TestRandomly.java b/test/sqlancer/TestRandomly.java
index 375752d8..6be6b1bd 100644
--- a/test/sqlancer/TestRandomly.java
+++ b/test/sqlancer/TestRandomly.java
@@ -184,6 +184,16 @@ public class TestRandomly {
assertEquals(0, r.getLong(0, 1));
}
+ @Test
+ public void testLong2() {
+ Randomly r = new Randomly();
+ for (int i = 0; i < NR_MIN_RUNS; i++) {
+ long val = r.getLong(-1, Long.MAX_VALUE);
+ assertTrue(val >= -1);
+ assertTrue(val < Long.MAX_VALUE);
+ }
+ }
+
@Test // check that when given a seed, each thread computes a consistent result
public void testSeed() {
int seed = 123; | ['src/sqlancer/Randomly.java', 'src/sqlancer/mariadb/gen/MariaDBSetGenerator.java', 'test/sqlancer/TestRandomly.java'] | {'.java': 3} | 3 | 3 | 0 | 0 | 3 | 1,553,498 | 311,779 | 43,474 | 396 | 319 | 73 | 6 | 2 | 262 | 27 | 73 | 1 | 0 | 0 | 2020-07-06T19:16:40 | 1,293 | Java | {'Java': 3648769, 'Python': 3131, 'Dockerfile': 313} | MIT License |
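The out-of-range value in the record above comes from long overflow rather than from the random generator itself: with `lower = -1` and `upper = Long.MAX_VALUE`, the difference `upper - lower` wraps to a negative number, so scaling it by `nextDouble()` yields values far below the lower bound. A small sketch of the failure and of the `Random.longs(origin, bound)` stream the patch switches to (the seed and bounds are arbitrary):

```java
import java.util.Random;

public class LongRangeDemo {

  public static void main(String[] args) {
    long lower = -1L;
    long upper = Long.MAX_VALUE;
    Random rnd = new Random(123);

    // (upper - lower) overflows: Long.MAX_VALUE - (-1) wraps to Long.MIN_VALUE,
    // so the scaled sample is usually a huge negative number, well below `lower`.
    long buggy = lower + (long) (rnd.nextDouble() * (upper - lower));
    System.out.println("overflowing arithmetic: " + buggy);

    // Random.longs(origin, bound) performs the bounded generation internally
    // and guarantees the value lies in [lower, upper).
    long fixed = rnd.longs(lower, upper).findFirst().getAsLong();
    System.out.println("bounded stream:         " + fixed);
  }
}
```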
328 | ff4j/ff4j/410/399 | ff4j | ff4j | https://github.com/ff4j/ff4j/issues/399 | https://github.com/ff4j/ff4j/pull/410 | https://github.com/ff4j/ff4j/pull/410 | 1 | fix | EventRepositoryRedis uses different keys for storing and retrieving audit data | Hi,
I am trying to use the `EventRepositoryRedis` to store monitoring data. Unfortunately the web UI always stays empty. Switching to a `JdbcEventRepository` immediately solves the problem, so I guess my basic setup is okay. Here is a snippet of my ff4j configuration:
```
...
val ff4j = new FF4j();
ff4j.setFeatureStore(new JdbcFeatureStore(ds));
ff4j.setPropertiesStore(new JdbcPropertyStore(ds));
val redisConnection = new RedisConnection(redisHost, redisPort);
ff4j.setEventRepository(new EventRepositoryRedis(redisConnection));
ff4j.audit(true);
...
```
After digging into the code I can see that a key like `"FF4J_EVENT_AUDITRAIL_20200113"` is used for storing the data, while a key like `"FF4J_EVENT_20200113"` is used for retrieving it. Only the first key exists in my Redis instance.
Is there any magic behind this or is this a bug? | e596cdbc6c89462415a4ef471eadd19a5d87f7df | a8b5769720fd5912538a065119b0e3751ba76e98 | https://github.com/ff4j/ff4j/compare/e596cdbc6c89462415a4ef471eadd19a5d87f7df...a8b5769720fd5912538a065119b0e3751ba76e98 | diff --git a/ff4j-store-redis/src/main/java/org/ff4j/store/EventRepositoryRedis.java b/ff4j-store-redis/src/main/java/org/ff4j/store/EventRepositoryRedis.java
index 1eea1257..86e4db7b 100644
--- a/ff4j-store-redis/src/main/java/org/ff4j/store/EventRepositoryRedis.java
+++ b/ff4j-store-redis/src/main/java/org/ff4j/store/EventRepositoryRedis.java
@@ -27,7 +27,6 @@ import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.ff4j.audit.Event;
-import org.ff4j.audit.EventConstants;
import org.ff4j.audit.EventQueryDefinition;
import org.ff4j.audit.EventSeries;
import org.ff4j.audit.MutableHitCount;
@@ -139,7 +138,7 @@ public class EventRepositoryRedis extends AbstractEventRepository {
try {
jedis = getJedis();
long timeStamp = evt.getTimestamp();
- String hashId = this.getHashKey(evt.getTimestamp(), evt.getAction());
+ String hashId = this.getHashKey(evt.getTimestamp());
evt.setUuid(String.valueOf(timeStamp));
jedis.zadd(hashId, timeStamp, objectMapper.writeValueAsString(evt));
return true;
@@ -153,13 +152,9 @@ public class EventRepositoryRedis extends AbstractEventRepository {
}
}
- private String getHashKey(long timestamp, String action) {
- String hashId = KEY_EVENT;
- if (action != null) {
- hashId += RedisContants.KEY_EVENT_AUDIT + "_";
- }
- long timeStamp = timestamp;
- hashId += SDF_KEY.format(new Date(timeStamp));
+ private String getHashKey(long timestamp) {
+ String hashId = KEY_EVENT + RedisContants.KEY_EVENT_AUDIT + "_";
+ hashId += SDF_KEY.format(new Date(timestamp));
return hashId;
}
@@ -171,7 +166,7 @@ public class EventRepositoryRedis extends AbstractEventRepository {
Jedis jedis = null;
try {
jedis = getJedis();
- String hashKey = getHashKey(timestamp, null);
+ String hashKey = getHashKey(timestamp);
// Check for the event within 100ms time range passed, hoping there won't be more than 10 for this.
Set<String> events = jedis.zrangeByScore(hashKey, timestamp - 100L, timestamp + 100L, 0, 10);
@@ -271,7 +266,7 @@ public class EventRepositoryRedis extends AbstractEventRepository {
EventSeries eventSeries = new EventSeries();
try {
jedis = getJedis();
- String hashKey = getHashKey(query.getFrom(), EventConstants.ACTION_CHECK_OK);
+ String hashKey = getHashKey(query.getFrom());
Set<String> events = jedis.zrangeByScore(hashKey, query.getFrom(), query.getTo(), 0, 100);
// FIXME: Server side pagination model isn't present? This could be a lot of data.
@@ -325,7 +320,7 @@ public class EventRepositoryRedis extends AbstractEventRepository {
Set<String> events = null;
try {
jedis = getJedis();
- String hashKey = getHashKey(query.getFrom(), null);
+ String hashKey = getHashKey(query.getFrom());
events = jedis.zrangeByScore(hashKey, query.getFrom(), query.getTo(), 0, UPPER_LIMIT);
} finally {
if (jedis != null) { | ['ff4j-store-redis/src/main/java/org/ff4j/store/EventRepositoryRedis.java'] | {'.java': 1} | 1 | 1 | 0 | 0 | 1 | 1,845,630 | 405,454 | 59,694 | 405 | 1,047 | 221 | 19 | 1 | 924 | 117 | 222 | 16 | 0 | 1 | 2020-05-05T14:53:39 | 1,285 | Java | {'Java': 2711080, 'CSS': 224698, 'HTML': 191359, 'JavaScript': 19189, 'Batchfile': 3140, 'FreeMarker': 196} | Apache License 2.0 |
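The mismatch described in the record above (events written under `FF4J_EVENT_AUDITRAIL_<yyyyMMdd>` but read back under `FF4J_EVENT_<yyyyMMdd>`) is the usual symptom of deriving a storage key in more than one place. A minimal sketch of the single key-builder shape the patch converges on; the class and method names here are illustrative, not the store's real API:

```java
import java.text.SimpleDateFormat;
import java.util.Date;

public class AuditKeyDemo {

  // One builder shared by the write path and every query path, so the prefix
  // cannot drift between storing events and reading them back.
  static String auditKey(long timestamp) {
    return "FF4J_EVENT_AUDITRAIL_" + new SimpleDateFormat("yyyyMMdd").format(new Date(timestamp));
  }

  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    System.out.println(auditKey(now));                       // e.g. FF4J_EVENT_AUDITRAIL_20200113
    System.out.println(auditKey(now).equals(auditKey(now))); // true: same key on both paths
  }
}
```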