Eclipse debugger: JUnit run pulls in the wrong class version - Java

When I step through my JUnit, I hit a line that runs, and the intended source code is shown in Eclipse. However, the JVM (launched by Eclipse JUnit launcher), does not use the right version. Instead, it's using some other (older) version. Because I'm not sure what's causing this, I'm providing more details than necessary.
I had built this at one point using Gradle on the command line, and ended up with two versions of the same class file: one at /bin/com.woodsman..., the other at /bin/main/com.woodsman...
I'm using
Java 11
Eclipse: Version: 2021-09 (4.21.0)
Eclipse: Build id: 20210910-1417
Gradle
JUnit 5
Junit Launcher:
Run Single test:
Test Class: com.woodsman.MyClassTest
Test Method: mySingleTest
Test Runner: JUnit 5
Program Argument: None
VM Arguments: -ea
Checked: Use the -XX:+ShowCodeDetails...
Project execution environment 'JavaSE-11'
Dependencies:
Modulepath entries: (empty)
Classpath Entries
JUnit 5 (Advanced Library)
my-project (My Eclipse project)
Project and External Dependencies
JRE System Library [JavaSE-11]
Source:
Default
org.junits (too many to list)
my-project
java ~/my-project/src/test
java ~/my-project/src/main
resource ~/my-project/src/test
resource ~/my-project/src/main
my-project
.git-crypt ~/my-project
.gradle ~/my-project
... many others ...
bin ~/my-project
build ~/my-project
#WebMvcTest(ApiController.class)
class ApiControllerTest extends MVCBaseTest {
#Test
public void generateFeedbackShouldUseBody() throws Exception {
FeedbackResponse feedbackResponse = new FeedbackResponse();
when(toolInvokerService.generateFeedback(any(), any(), any(Boolean.class))).thenReturn(feedbackResponse);
setDinahClient(toolInvokerService);
ObjectNode requestBody = objectMapper.createObjectNode();
requestBody.put("content", "Cellular phones and laptops are used in communication everyone is using it every day.");
final MockHttpServletRequestBuilder request = post("/feedback")
.contentType(MediaType.APPLICATION_JSON)
.content(requestBody.toString());
ResultActions mockResult = mockMvc.perform(request);
String responseBody = mockResult.andReturn().getResponse().getContentAsString();
System.out.println("###="+responseBody);
}
}
#Service
public class ToolInvokerService {
private static final Logger log = LoggerFactory.getLogger(ToolInvokerService.class);
private final ContentBuilderService contentBuilderService;
private final ToolNormalizerService toolNormalizerService;
private final FeedbackMapperService feedbackMapperService;
private final LangDetectClient langDetectClient;
private final GrammartoolClient grammarToolClient;
private final AtdClient atdClient;
private final DinahClient dinahClient;
private final CybertronClient cybertronClient;
private final StatsLogger statsLogger;
public ToolInvokerService(
ContentBuilderService contentBuilderService,
ToolNormalizerService toolNormalizerService,
FeedbackMapperService feedbackMapperService,
LangDetectClient langDetectClient,
GrammartoolClient grammarToolClient,
AtdClient atdClient,
DinahClient dinahClient,
CybertronClient cybertronClient,
StatsLogger statsLogger) {
this.contentBuilderService = contentBuilderService;
this.toolNormalizerService = toolNormalizerService;
this.feedbackMapperService = feedbackMapperService;
this.langDetectClient = langDetectClient;
this.grammarToolClient = grammarToolClient;
this.atdClient = atdClient;
this.dinahClient = dinahClient;
this.cybertronClient = cybertronClient;
this.statsLogger = statsLogger;
}
public FeedbackResponse generateFeedback(UserContext userContext, String content, boolean trustLineBreaks) throws
InterruptedException,
ExecutionException,
URISyntaxException,
IOException,
UnsupportedLanguageException {
long startTime = System.currentTimeMillis();
FileOutputStream fop = new FileOutputStream("ben-test.log",true);
fop.write("gfb-bp1\n".getBytes());
if (StringUtils.isEmpty(content)) {
return getEmptyFeedbackResponse(0L);
}
// to reduce call time on long content verify english for up to 5000 characters
langDetectClient.verifyEnglish(CleanTextUtil.truncateText(content, 5000));
fop.write("gfb-bp2\n".getBytes());
fop.write("gfb-bp2.5\n".getBytes());
fop.write(("## dinahClient.hashCode()="+dinahClient.hashCode()).getBytes());
fop.write(("## dinahClient.toString()="+dinahClient.toString()).getBytes());
ContentMetadataResponse contentMetadata = dinahClient.getContentMetadata(content, trustLineBreaks);
log.info("###"+contentMetadata);
if (CollectionUtils.isEmpty(contentMetadata.getContentMetadata().getSentences())) {
log.warn("no sentence content found: returning empty feedback response");
return getEmptyFeedbackResponse(contentMetadata.getTimedResponseDurationMs());
}
List<ToolResponse> toolResponses = getNormalizedToolResponses(userContext.getLocale(), content, contentMetadata.getContentMetadata());
List<Observation> normalizedObservations = new ArrayList<>();
Map<Tool, Long> toolDurations = new HashMap<>();
Map<Tool, Integer> observationCounts = new HashMap<>();
Map<Tool, List<?>> toolErrors = new HashMap<>();
for (ToolResponse toolResponse : toolResponses) {
normalizedObservations.addAll(toolResponse.getNormalizedToolObservations());
toolDurations.put(toolResponse.getTool(), toolResponse.getTimedResponseDurationMs());
observationCounts.put(toolResponse.getTool(), toolResponse.getToolObservations().size());
toolErrors.put(toolResponse.getTool(), toolResponse.getToolErrors());
}
List<Observation> finalObservations = feedbackMapperService.constructFinalObservations(userContext.getLocale(),
normalizedObservations,
content,
contentMetadata.getContentMetadata());
Stats stats = new Stats(
System.currentTimeMillis() - startTime,
contentMetadata.getTimedResponseDurationMs(),
toolDurations,
observationCounts,
contentMetadata.getContentMetadata().getIgnoreSpans());
FeedbackResponse feedbackResponse = new FeedbackResponse(toolErrors, stats, finalObservations);
statsLogger.logResponse(userContext, feedbackResponse, content, normalizedObservations);
return feedbackResponse;
}
}

Related

How to spock integration test with standalone tomcat runner?

Our project is not currently using a spring framework.
Therefore, it is being tested based on the standalone tomcat runner.
However, since integration tests such as @SpringBootTest are not possible, Tomcat is started in advance and the HTTP API test is carried out using Spock.
Is there a way to run this like @SpringBootTest?
TomcatRunner
// Embedded Tomcat instance managed by this launcher (created in init()).
private Tomcat tomcat = null;
// HTTP connector port; defaults to 8080 unless set by the args constructor.
private int port = 8080;
// Web-app context path; null means "" (root) is substituted in init().
private String contextPath = null;
// Document base directory; null means java.io.tmpdir is substituted in init().
private String docBase = null;
// Root web-app context that servlets and listeners are registered against.
private Context rootContext = null;
// Creates a launcher with the defaults: port 8080, root context path, and a
// doc base under java.io.tmpdir (defaults applied in init()).
public Tomcat8Launcher(){
init();
}
// Creates a launcher with an explicit port, context path and doc base.
// A null contextPath or docBase falls back to the defaults applied in init().
public Tomcat8Launcher(int port, String contextPath, String docBase){
this.port = port;
this.contextPath = contextPath;
this.docBase = docBase;
init();
}
/**
 * Builds the embedded Tomcat instance: sets the connector port, enables JNDI
 * naming, fills in defaults for contextPath ("") and docBase (java.io.tmpdir),
 * and registers the root context.
 */
private void init() {
    tomcat = new Tomcat();
    tomcat.setPort(port);
    tomcat.enableNaming();

    // Default the context path to the root context when none was given.
    contextPath = (contextPath == null) ? "" : contextPath;

    // Default the doc base to the system temp directory when none was given.
    if (docBase == null) {
        docBase = new File(System.getProperty("java.io.tmpdir")).getAbsolutePath();
    }

    rootContext = tomcat.addContext(contextPath, docBase);
}
/**
 * Registers a servlet instance under the given name on the root context and
 * maps it to the given URI pattern.
 */
public void addServlet(String servletName, String uri, HttpServlet servlet) {
    final Context ctx = this.rootContext;
    Tomcat.addServlet(ctx, servletName, servlet);
    ctx.addServletMapping(uri, servletName);
}
// Registers a ServletContextListener with the root context.
// NOTE(review): only the class NAME is registered — Tomcat will instantiate the
// listener class itself (so it needs a public no-arg constructor), and any state
// on the instance passed in here is discarded. Confirm this is intended.
public void addListenerServlet(ServletContextListener listener){
rootContext.addApplicationListener(listener.getClass().getName());
}
/**
 * Starts Tomcat and then BLOCKS the calling thread in Server.await() until the
 * server is shut down. Suitable for main(); from test code prefer
 * {@link #startServer(boolean)} with {@code false}, otherwise the calling
 * thread never returns from this method — likely why a test that launches
 * Tomcat this way appears to hang after startup.
 */
public void startServer() throws LifecycleException {
    startServer(true);
}

/**
 * Starts Tomcat.
 *
 * @param block when true, blocks the calling thread until the server shuts
 *              down; when false, returns immediately after startup (tests).
 */
public void startServer(boolean block) throws LifecycleException {
    tomcat.start();
    if (block) {
        tomcat.getServer().await();
    }
}
/**
 * Stops the embedded Tomcat instance and destroys it so connectors, threads
 * and the bound port are fully released.
 */
public void stopServer() throws LifecycleException {
    tomcat.stop();
    // Fix: stop() alone leaves the server in the STOPPED state without
    // releasing all resources; destroy() completes the lifecycle.
    tomcat.destroy();
}
/**
 * Stand-alone entry point: routes JUL logging through Log4j, wires up Tomcat's
 * in-memory JNDI implementation, registers the demo servlets, and starts the
 * server (startServer() blocks in Server.await() until shutdown).
 */
public static void main(String[] args) throws Exception {
    // Bridge java.util.logging to Log4j2.
    System.setProperty("java.util.logging.manager", "org.apache.logging.log4j.jul.LogManager");
    // Use Tomcat's JNDI naming implementation.
    System.setProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY, "org.apache.naming.java.javaURLContextFactory");
    System.setProperty(javax.naming.Context.URL_PKG_PREFIXES, "org.apache.naming");

    final Tomcat8Launcher launcher = new Tomcat8Launcher();
    launcher.addListenerServlet(new ConfigInitBaseServlet());
    launcher.addServlet("restServlet", "/rest/*", new RestServlet());
    launcher.addServlet("jsonServlet", "/json/*", new JsonServlet());
    launcher.startServer();
}
Spock API Test example
class apiTest extends Specification {

    //static final Tomcat8Launcher tomcat = new Tomcat8Launcher()

    // Base URL of the API under test; assumes the server is already running.
    static final String testURL = "http://localhost:8080/api/"

    @Shared
    def restClient

    // Runs once before all feature methods: build the shared REST client.
    def setupSpec() {
        // tomcat.main()
        restClient = new RESTClient(testURL)
    }

    def 'findAll user'() {
        when:
        def response = restClient.get([path: 'user/all'])

        then:
        with(response) {
            status == 200
            contentType == "application/json"
        }
    }
}
The test will not work if the comment markers are removed from the lines below.
// static final Tomcat8Launcher tomcat = new Tomcat8Launcher()
This line is specified API Test at the top.
// tomcat.main()
This line is specified API Test setupSpec() method
I don't know why, but only logs are recorded after Tomcat has operated and the test method is not executed.
Is there a way to fix this?
I would suggest to create a Spock extension to encapsulate everything you need. See writing custom extensions of the Spock docs as well as the built-in extensions for inspiration.

Cannot find symbol symbol method of() location interface java.util.list, unittests

I'm currently working on a spring boot application, where I'm now writing the unittests. But when I run the command 'mvn test', I get the following error:
This is the code
BoardServiceTest.java
#Test
public void testGetBoards() {
Set<Lists> list = new HashSet<Lists>();
List<Board> expectedBoards = List.of(
new Board(1, "Studie", list),
new Board(2, "Todo Applicatie", list)
);
when(this.boardRepo.findAll()).thenReturn(expectedBoards);
var receivedBoards = this.boardService.getBoards();
assertEquals(expectedBoards, receivedBoards);
verify(this.boardRepo, times(1)).findAll();
}
ListsServiceTest.java
#Test
public void testGetLists() throws Exception {
int boardId = 1;
Set<Task> task = new HashSet<>();
List<Lists> expectedLists = List.of(
new Lists(1, "registered", new Board(), task),
new Lists(2, "open", new Board(), task)
);
when(this.listRepository.findAll()).thenReturn(expectedLists);
var receivedLists = this.listService.getLists(boardId);
assertEquals(expectedLists, receivedLists);
verify(this.listRepository, times(1)).findAll();
}
List.of was added in Java 9, you need to make sure that you use JDK 9+ to compile your sources and that appropriate java version is set in the maven pom.xml.

Setting envvars in Jenkins plugin

I'm trying to develop new Jenkins plugin. I've started from hello-world archetype provided by Jenkins. My plugin works fine!
But now I want to set some environment variables from my plugin. I've used this code to do it:
// Build-step entry point (SimpleBuildStep-style signature).
public void perform(Run<?, ?> run, FilePath workspace, Launcher launcher, TaskListener listener) {
...
// NOTE(review): run.getEnvironment(listener) appears to return a freshly computed
// EnvVars snapshot, so putting a key into that map does not persist to later build
// steps — presumably why the variable is never visible downstream. Confirm against
// the Jenkins core javadoc (EnvironmentContributingAction is the usual mechanism).
EnvVars envVars = run.getEnvironment(listener);
envVars.put("SOME_VARIABLE", "SOME_VALUE");
...
}
But it doesn't work. I'm trying to use this variable in the next build step and get nothing. I've googled it and there aren't any clear descriptions. The source code of existing plugins (EnvInject, etc.) also doesn't help.
What am I doing wrong? Can anybody provide some samples?
From my plugin...
/**
 * Adds (or overwrites) a global environment variable by storing it on the
 * Jenkins instance's global node properties, reusing the first existing
 * EnvironmentVariablesNodeProperty or creating one when none exists.
 */
private void putEnvVar(String key, String value) throws IOException {
    final Jenkins jenkins = Jenkins.getInstance();
    final DescribableList<NodeProperty<?>, NodePropertyDescriptor> nodeProperties =
            jenkins.getGlobalNodeProperties();
    final List<EnvironmentVariablesNodeProperty> existing =
            nodeProperties.getAll(hudson.slaves.EnvironmentVariablesNodeProperty.class);

    final EnvVars target;
    if (existing == null || existing.isEmpty()) {
        // No env-vars node property registered yet: create and register one.
        final EnvironmentVariablesNodeProperty fresh =
                new hudson.slaves.EnvironmentVariablesNodeProperty();
        nodeProperties.add(fresh);
        target = fresh.getEnvVars();
    } else {
        // Reuse the first existing env-vars property.
        target = existing.get(0).getEnvVars();
    }
    target.put(key, value);
}

ElasticSearch in-memory for testing

I would like to write some integration with ElasticSearch. For testing I would like to run in-memory ES.
I found some information in documentation, but without example how to write those kind of test. Elasticsearch Reference [1.6] » Testing » Java Testing Framework » integration tests
« unit tests
Also I found the following article, but it's out of date: Easy JUnit testing with Elastic Search.
I'm looking for an example of how to start and run ES in-memory and access it over the REST API.
Based on the second link you provided, I created this abstract test class:
#RunWith(SpringJUnit4ClassRunner.class)
public abstract class AbstractElasticsearchTest {
private static final String HTTP_PORT = "9205";
private static final String HTTP_TRANSPORT_PORT = "9305";
private static final String ES_WORKING_DIR = "target/es";
private static Node node;
#BeforeClass
public static void startElasticsearch() throws Exception {
removeOldDataDir(ES_WORKING_DIR + "/" + clusterName);
Settings settings = Settings.builder()
.put("path.home", ES_WORKING_DIR)
.put("path.conf", ES_WORKING_DIR)
.put("path.data", ES_WORKING_DIR)
.put("path.work", ES_WORKING_DIR)
.put("path.logs", ES_WORKING_DIR)
.put("http.port", HTTP_PORT)
.put("transport.tcp.port", HTTP_TRANSPORT_PORT)
.put("index.number_of_shards", "1")
.put("index.number_of_replicas", "0")
.put("discovery.zen.ping.multicast.enabled", "false")
.build();
node = nodeBuilder().settings(settings).clusterName("monkeys.elasticsearch").client(false).node();
node.start();
}
#AfterClass
public static void stopElasticsearch() {
node.close();
}
private static void removeOldDataDir(String datadir) throws Exception {
File dataDir = new File(datadir);
if (dataDir.exists()) {
FileSystemUtils.deleteRecursively(dataDir);
}
}
}
In the production code, I configured an Elasticsearch client as follows. The integration test extends the above defined abstract class and configures property elasticsearch.port as 9305 and elasticsearch.host as localhost.
#Configuration
public class ElasticsearchConfiguration {
#Bean(destroyMethod = "close")
public Client elasticsearchClient(#Value("${elasticsearch.clusterName}") String clusterName,
#Value("${elasticsearch.host}") String elasticsearchClusterHost,
#Value("${elasticsearch.port}") Integer elasticsearchClusterPort) throws UnknownHostException {
Settings settings = Settings.settingsBuilder().put("cluster.name", clusterName).build();
InetSocketTransportAddress transportAddress = new InetSocketTransportAddress(InetAddress.getByName(elasticsearchClusterHost), elasticsearchClusterPort);
return TransportClient.builder().settings(settings).build().addTransportAddress(transportAddress);
}
}
That's it. The integration test will run the production code which is configured to connect to the node started in the AbstractElasticsearchTest.startElasticsearch().
In case you want to use the elasticsearch REST api, use port 9205. E.g. with Apache HttpComponents:
// Example: install an index template over Elasticsearch's REST API on port 9205
// using Apache HttpComponents.
HttpClient httpClient = HttpClients.createDefault();
HttpPut httpPut = new HttpPut("http://localhost:9205/_template/" + templateName);
// Template body is read from a local JSON file.
httpPut.setEntity(new FileEntity(new File("template.json")));
httpClient.execute(httpPut);
Here is my implementation
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.UUID;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
/**
*
* #author Raghu Nair
*/
/**
 * Starts a throw-away, single-node Elasticsearch instance for tests, writing
 * its data/logs/work directories under a unique temp directory, and tears it
 * all down (node, client, temp dir) afterwards.
 *
 * @author Raghu Nair
 */
public final class ElasticSearchInMemory {

    private static Client client = null;
    private static File tempDir = null;
    private static Node elasticSearchNode = null;

    /** @return the client connected to the embedded node, or null before setUp(). */
    public static Client getClient() {
        return client;
    }

    /**
     * Boots the embedded node with a random cluster name (so concurrent runs
     * cannot accidentally join each other) and connects a client to it.
     */
    public static void setUp() throws Exception {
        // Fix: the original File.createTempFile(...) + delete() + mkdir() dance is racy
        // and ignored both return values; Files.createTempDirectory is atomic and throws
        // on failure.
        tempDir = Files.createTempDirectory("elasticsearch-temp").toFile();
        System.out.println("writing to: " + tempDir);

        String clusterName = UUID.randomUUID().toString();
        elasticSearchNode = NodeBuilder
                .nodeBuilder()
                .local(false)
                .clusterName(clusterName)
                .settings(
                        ImmutableSettings.settingsBuilder()
                                .put("script.disable_dynamic", "false")
                                .put("gateway.type", "local")
                                .put("index.number_of_shards", "1")
                                .put("index.number_of_replicas", "0")
                                .put("path.data", new File(tempDir, "data").getAbsolutePath())
                                .put("path.logs", new File(tempDir, "logs").getAbsolutePath())
                                .put("path.work", new File(tempDir, "work").getAbsolutePath())
                ).node();
        elasticSearchNode.start();
        client = elasticSearchNode.client();
    }

    /** Closes the client, stops the node, and removes the temp directory. */
    public static void tearDown() throws Exception {
        if (client != null) {
            client.close();
        }
        if (elasticSearchNode != null) {
            elasticSearchNode.stop();
            elasticSearchNode.close();
        }
        if (tempDir != null) {
            removeDirectory(tempDir);
        }
    }

    /**
     * Recursively deletes a directory tree (or a single file).
     *
     * @throws IOException if any entry cannot be deleted
     */
    public static void removeDirectory(File dir) throws IOException {
        if (dir.isDirectory()) {
            File[] children = dir.listFiles();
            if (children != null) {
                for (File child : children) {
                    removeDirectory(child);
                }
            }
        }
        Files.delete(dir.toPath());
    }
}
You can start ES on your local with:
// Minimal embedded node: only path.home is required; the node listens on the
// default ports (REST on 9200, as shown below).
Settings settings = Settings.settingsBuilder()
.put("path.home", ".")
.build();
NodeBuilder.nodeBuilder().settings(settings).node();
When ES started, access it over REST like:
http://localhost:9200/_cat/health?v
As of 2016 embedded elasticsearch is no-longer supported
As per a response from one of the developers in 2017, you can use the following approaches:
Use the Gradle tools elasticsearch already has. You can read some information about this here: https://github.com/elastic/elasticsearch/issues/21119
Use the Maven plugin: https://github.com/alexcojocaru/elasticsearch-maven-plugin
Use Ant scripts like http://david.pilato.fr/blog/2016/10/18/elasticsearch-real-integration-tests-updated-for-ga
Using Docker: https://www.testcontainers.org/modules/elasticsearch
Using Docker from maven: https://github.com/dadoonet/fscrawler/blob/e15dddf72b1ed094dad279d492e4e0314f73683f/pom.xml#L241-L289

Rhino debugger - can't hit a breakpoint?

I have this legacy web application on inspection, that utilizes Rhino to script some database import tasks. Scripted task is executed properly.
However, I can't seem to hit a breakpoint in the JS script file. I'm not even sure if it's possible to debug JS script this way, so if anyone can give some more insight or advice... The core setup is as follows:
Debugger (listener):
// executes when the app is launched...
if (!ContextFactory.hasExplicitGlobal()) {
ContextFactory cxf = new ContextFactory();
ContextFactory.initGlobal(cxf);
String rhino = "transport=socket,suspend=y,address=9999";
System.out.println(">>>>>> RHINO <<<<<<");
RhinoDebugger debugger = new RhinoDebugger(rhino);
debugger.start();
cxf.addListener(debugger);
}
...
Scripting context:
// Spring component that runs a JavaScript import script inside a Rhino context,
// exposing DAOs and a results map to the script's global scope.
#Component("importDebugMockup")
public class ImportDebugMockup extends Import {
#Autowired private SomeDAO someDAO;
#Autowired private SomeOtherDAO someOtherDAO;
...
// Captures the global ContextFactory (the one the debugger was registered on).
private ContextFactory cxf;
private Document doc;
public ImportDebugMockup() {
// NOTE(review): for breakpoints to work, the RhinoDebugger listener must have been
// added to this global factory before any context is entered — confirm the
// initialization order relative to the debugger setup block.
this.cxf = ContextFactory.getGlobal();
}
...
#Transactional
// Reads the script file, populates the scope with Java objects, evaluates the
// script under the name "script<scriptName>.js", then invokes its doImport().
public Map<String, Object> doImport(final String scriptName, final String filePath)
throws ScriptException, IOException {
final Map<String, Object> results = new HashMap<>();
final String scriptSource = this.readJSFileToString(filePath, Charset.forName("UTF-8"));
Context context = this.cxf.enterContext();
try {
Scriptable scope = new ImporterTopLevel(context);
// Expose Java objects to the script as global properties.
ScriptableObject.putProperty(scope, "doc", doc);
ScriptableObject.putProperty(scope, "someDAO", this.someDAO);
ScriptableObject.putProperty(scope, "someOtherDAO", this.someOtherDAO);
...
ScriptableObject.putProperty(scope, "results", results);
Object functionArgs[] = { "null" };
// NOTE(review): this source name is what the debugger presumably matches
// breakpoints against — verify it corresponds to the source path configured
// in the IDE's remote JS debug configuration.
String scriptExecName = "script" + scriptName + ".js";
context.evaluateString(scope, scriptSource, scriptExecName, 1, null);
Function fct = (Function) scope.get("doImport", scope); // call doImport()
Object result = fct.call(context, scope, scope, functionArgs);
} finally {
// Always exit the context, even if evaluation throws.
Context.exit();
}
return results;
}
}
The script:
// Make java.io and the project package visible to the script (Rhino importPackage).
importPackage(java.io);
importPackage(some.package);
// ...
// some utility functions here
// ...
// Entry point invoked from Java (ImportDebugMockup.doImport) via Function.call;
// someDAO/someOtherDAO are Java objects injected into the scope by the host.
function doImport() {
...
var i = 0; // set breakpoint here - can't hit it
someDAO.doSomething(...); // using propagated java object
...
someOtherDAO.doSomethingElse();
...
}
EDIT
Remote JS Debug configuration (with Mozilla Rhino - Attaching Connector at port 9999) is set up, like in this article, for example. Source is configured to point to directory where the JS script is located, however it does not suspend on breakpoint...

Categories