Stop a record-writing thread while keeping committed results in MongoDB - Java

There is a PUT method:
@RequestMapping(value = "/api/v1/fias/{fileName}", method = PUT). This method parses files. Parsing runs in a separate thread, so from the user's point of view the method returns instantly with the ID of the entity created in the Mongo database.
There is a POST method:
@RequestMapping(value = "/api/v1/fias/interrupt/{objectId}", method = POST). This method should suspend the thread that is parsing the file started by the PUT method. Can this be implemented?
I tried it like this:
@Bean(name = "threadPoolTaskExecutor")
public ThreadPoolTaskExecutor executorService() {
    ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
    executor.setCorePoolSize(2);
    executor.setMaxPoolSize(2);
    executor.setQueueCapacity(500);
    executor.setThreadNamePrefix("codeinside-");
    executor.initialize();
    return executor;
}

@RequestMapping(value = "/api/v1/fias/{fileName}", method = PUT)
public ResponseEntity<Document> update(@PathVariable(value = "fileName") String fileName) throws BadParamException, NotFinishProcessException {
    return ResponseEntity.status(HttpStatus.CREATED).body(fiasQueryService.updateFiasByFileName(fileName));
}

@RequestMapping(value = "/api/v1/fias/interrupt/{objectId}", method = POST)
public ResponseEntity<Document> interrupt(@PathVariable(value = "objectId") String objectId) {
    return ResponseEntity.status(HttpStatus.OK).body(fiasQueryService.interrupt(objectId));
}
@Service
public class FiasQueryServiceImpl implements FiasQueryService {

    @Autowired
    private AsyncFias asyncFias;
    @Autowired
    private ThreadPoolTaskExecutor executorService;

    private CompletableFuture<Integer> asyncResult;

    @Override
    public Document updateFiasByFileName(String fileName) throws NotFinishProcessException {
        String settingsPath = settingsService.getStringParam(FIAS_FILE_PATH);
        File file = new File(settingsPath + "/" + fileName);
        ObjectId objectId = checkAndInsertStatus(file.getName().toLowerCase());
        asyncResult = asyncFias.startUpdate(file, objectId);
        return new Document("_id", objectId.toString()).append("success", true);
    }

    @Override
    public Document interrupt(String objectIdString) {
        setStatus(new ObjectId(objectIdString), INTERRUPT);
        asyncResult.cancel(true);
        Integer cnt = null;
        if (asyncResult.isCancelled()) {
            ObjectId objectId = new ObjectId(objectIdString);
            try {
                cnt = asyncResult.get();
            } catch (InterruptedException | ExecutionException e) {
                e.printStackTrace();
            }
            setStatus(objectId, INTERRUPT);
            return new Document("success", true).append("count", cnt);
        } else {
            return new Document("success", false);
        }
    }
}
@Service
public class AsyncFias {

    @Async("threadPoolTaskExecutor")
    @Scope(BeanDefinition.SCOPE_PROTOTYPE)
    public CompletableFuture<Integer> startUpdate(File file, ObjectId objectId) {
        // a lot of code.......
        ClientSession session = mongo.startSession(ClientSessionOptions.builder().causallyConsistent(true).build());
        MongoDatabase db = getDb(collectionInfo);
        MongoCollection<Document> collection = db.getCollection(collectionInfo.getCollectionName());
        collection.insertMany(session, dbObjects);
        session.close();
        // a lot of code.......
        return CompletableFuture.completedFuture(count);
    }
}
But I get an NPE at the line asyncResult.cancel(true);
I also tried to stop the worker thread with executorService.shutdown();
But in that case, the records that had already been written by the time the thread stopped were rolled back. How can I stop the writing thread so that the records inserted up to that point are kept?

I changed the startUpdate method:
@Service
public class AsyncFias {

    private static final Logger log = LoggerFactory.getLogger(AsyncFias.class);

    private final FiasFileService fiasFileService;
    private final MongoDBService mongoDBService;
    private AtomicBoolean inProgress = new AtomicBoolean(false);
    private AtomicInteger count = new AtomicInteger(0);

    AsyncFias(FiasFileService fiasFileService, MongoDBService mongoDBService) {
        this.fiasFileService = fiasFileService;
        this.mongoDBService = mongoDBService;
    }

    public Integer getIncrement() {
        return count.get();
    }

    @Async("threadPoolTaskExecutor")
    @Scope(BeanDefinition.SCOPE_PROTOTYPE)
    public Future<Void> startUpdate(File file) throws InterruptedException {
        DbfUtilEnum utilEnum = DbfUtilEnum.fromFileName(file.getName().toLowerCase());
        DbfMapper<Document> objMapper = utilEnum.getDbfMapper();
        List<Document> dbObjects = fiasFileService.processFile(file, objMapper);
        String collectionName = utilEnum.getCollectionName();
        EntryMetaInfo metaInfo = new EntryMetaInfo(collectionName, collectionName, null, false, null);
        List<List<Document>> lists = ListUtils.partition(dbObjects, 1000);
        if (inProgress.compareAndSet(false, true)) {
            for (List<Document> it : lists) {
                //Thread.sleep(2000);
                if (Thread.currentThread().isInterrupted()) {
                    System.out.println("Cancelled");
                    inProgress.set(false);
                    break;
                }
                mongoDBService.insertBulk(metaInfo, it);
                count.getAndIncrement();
                try {
                    TimeUnit.MILLISECONDS.sleep(1);
                } catch (InterruptedException e) {
                    AsyncResult<Void> result = new AsyncResult<>(null);
                    result.cancel(true);
                    return result;
                }
            }
        }
        lists.clear();
        count.set(0);
        AsyncResult<Void> result = new AsyncResult<>(null);
        result.cancel(true);
        return result;
    }
}
@Service
public class FiasQueryServiceImpl implements FiasQueryService {

    private Future<Void> asyncResult;
    // a lot of code

    @Override
    public Document updateFiasByFileName(String fileName) throws NotFinishProcessException, InterruptedException {
        String settingsPath = settingsService.getStringParam(FIAS_FILE_PATH);
        File file = new File(settingsPath + "/" + fileName);
        ObjectId objectId = checkAndInsertStatus(file.getName().toLowerCase());
        asyncResult = asyncFias.startUpdate(file);
        return new Document("_id", objectId.toString()).append("success", true);
    }

    @Override
    public Document interrupt(String objectIdString) {
        asyncResult.cancel(true);
        if (asyncResult.isCancelled()) {
            log.info("asyncResult.isCancelled()");
            ObjectId objectId = new ObjectId(objectIdString);
            setStatus(objectId, INTERRUPT);
            return new Document("success", true).append("count", asyncFias.getIncrement());
        } else {
            return new Document("success", false);
        }
    }
}
General conclusion: for Future objects, cancel(true) has to be called twice: once when the Future is created and once when the worker thread is being stopped.
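For reference, here is a minimal sketch of the plain cooperative-cancellation pattern all of the above builds on, using a bare ExecutorService (the class and the batch loop are illustrative, not the FIAS code): cancel(true) is called on the Future returned by submit(), and the worker checks its interrupt flag between batches, so every batch written before the interruption stays committed.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class CancellableBatchWriter {

    private final ExecutorService executor = Executors.newFixedThreadPool(2);
    private volatile Future<Integer> current;

    public void start() {
        // submit() hands back the Future that cancel(true) must be called on;
        // cancel(true) sets the interrupt flag of the thread running the task.
        current = executor.submit(() -> {
            int written = 0;
            for (int batch = 0; batch < 1000; batch++) {
                if (Thread.currentThread().isInterrupted()) {
                    break; // stop cleanly; batches already written stay committed
                }
                // insert one batch here, each batch as its own write
                written++;
            }
            return written;
        });
    }

    public void interrupt() {
        Future<Integer> f = current;
        if (f != null) {
            f.cancel(true); // interrupts the running task
        }
    }
}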

Related

Spring Batch - Not Shutting down - Due to Static Method call

I have completed a Spring Batch job (standalone JAR) using Spring Boot + CommandLineRunner, but the JVM does not shut down after completion. After some initial research I thought it was not shutting down for the reasons below:
1. I am not closing the Spring application context at the end of the CommandLineRunner.
2. The executor service is not shut down properly, which might be keeping the JVM from shutting down.
I don't want to call System.exit(), which is a forceful shutdown.
I tried closing the application context and also verified the executor service is shut down using the isShutdown() method (it returns true).
Then I found the root cause: a static method call is the culprit. When I commented out the static method call, the job shut down gracefully even without closing the application context explicitly.
I am not sure why this happens. Do I need to convert everything to instance methods, or is there something else I am missing here? Can someone please shed some light?
Main Class
#SpringBootApplication
#ComponentScan(basePackages = "com.acn.abp.printbatch")
#EnableTransactionManagement
#ImportResource({ "ABPBatchInfrastructure.xml", "financeBillPayAppConfig.xml" })
public class financeBillPayFileUploadApplication extends PrintBatchConstants implements CommandLineRunner {
#Autowired
private NotifyConfig notify;
#Autowired
private ApplicationContext ctx;
static final Logger logger = LoggerFactory.getLogger(financeBillPayFileUploadApplication.class);
public static void main(String[] args) {
SpringApplication application = new SpringApplication(financeBillPayFileUploadApplication.class);
application.setBannerMode(Banner.Mode.OFF);
application.run(args);
}
#Override
public void run(String... args) throws Exception {
logger.info(notify.getEnvironment());
JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
Job job = ctx.getBean(Job.class);
jobLauncher.run(job,
new JobParametersBuilder()
.addString(batchDocumentClass, "InvoiceStatementDocumentation")
.addString(batchType, "2020-06-04")
.addString(batchEmailID, notify.getSupportEmailId())
.addString(batchEnvironment, notify.getEnvironment())
.toJobParameters());
System.out.println("Here cxf");
((ConfigurableApplicationContext)ctx).close();
}
}
Below is the class causing the problem. If I comment out the call below, everything works perfectly:
populateItemDocuments(job, printConfig.geteCMObjectStore(), printConfig.geteCMUserId());
The class where this method is called:
@Component
public class DuplexWorker implements Callable {

    static final Logger logger = LoggerFactory.getLogger(DuplexWorker.class);

    @Autowired
    private ManageFormService formgmtClient;
    @Autowired
    private PostScriptService postScriptService;
    @Autowired
    private BarcodeService barcodeService;

    private static CfBatchPrintConfiguration printConfig;
    private static CfPersistenceUtil dbUtilService;
    private static FilenetDocumentRetrieval docmgmtClient;

    @Autowired
    public DuplexWorker(CfPersistenceUtil dbUtilService, CfBatchPrintConfiguration printConfig, FilenetDocumentRetrieval docmgmtClient) {
        DuplexWorker.dbUtilService = dbUtilService;
        DuplexWorker.printConfig = printConfig;
        DuplexWorker.docmgmtClient = docmgmtClient;
    }

    private MailUtil mailUtil;
    private NotifyConfig notify;
    private List<PrintJobItem> printJobItems;
    private List<String> groupIds;
    private ArrayList duplexJobs;
    private String groupId;
    private CountDownLatch latch;

    public DuplexWorker(ArrayList duplexJobs, String groupId, CountDownLatch latch) {
        super();
        this.latch = latch;
        this.duplexJobs = duplexJobs;
        this.groupId = groupId;
    }

    public DuplexWorker(CountDownLatch latch, MailUtil mailUtil, NotifyConfig notify, List<PrintJobItem> findByPrintStatusEquals, List<String> groupIds) {
        this.latch = latch;
        this.mailUtil = mailUtil;
        this.notify = notify;
        this.printJobItems = findByPrintStatusEquals;
        this.groupIds = groupIds;
    }

    @Override
    public Object call() throws Exception {
        try {
            if ((duplexJobs != null) && (!duplexJobs.isEmpty())) {
                String prevJobId = null;
                int docCount = 0;
                CvPrintJob consolidatedPrintJob = (CvPrintJob) duplexJobs.get(0);
                ArrayList printItems = new ArrayList();
                if (consolidatedPrintJob != null) {
                    ArrayList items = consolidatedPrintJob.getPrintJobItems();
                    int numPages = 0;
                    if ((items != null) && (!items.isEmpty())) {
                        CvPrintJobItem firstItem = (CvPrintJobItem) items.get(0);
                        numPages = CfBatchPrintUtil.getItemTotalPages(firstItem);
                        logger.info("Item Total Pages == " + numPages);
                        logger.info("Job Item Page Limit == " + printConfig.getJobItemPageLimit());
                        consolidatedPrintJob.setSequence(firstItem.getSequence());
                    }
                    if (numPages <= printConfig.getJobItemPageLimit()) {
                        consolidatedPrintJob.setHasLargeItems(false);
                        logger.info("Item setHasLargeItems == false");
                    } else {
                        consolidatedPrintJob.setHasLargeItems(true);
                        logger.info("Item setHasLargeItems == true");
                    }
                }
                ArrayList startBannerDataList = new ArrayList();
                ArrayList barcodeList = new ArrayList();
                ArrayList barcodeCorresPageCount = new ArrayList();
                ArrayList statementNumberList = new ArrayList();
                for (int i = 0; i < duplexJobs.size(); i++) {
                    CvPrintJob job = (CvPrintJob) duplexJobs.get(i);
                    if ((prevJobId == null) || (!prevJobId.equalsIgnoreCase(job.getJobId()))) {
                        docCount = 0;
                    }
                    populateItemDocuments(job, printConfig.geteCMObjectStore(), printConfig.geteCMUserId());
                }
                consolidatedPrintJob.setPrintJobItems(printItems);
            } else {
                logger.info("====================================================================");
                logger.info("=================>> No DUPLEX jobs to process <<===================");
                logger.info("====================================================================");
            }
            duplexJobs = null;
            this.latch.countDown();
            System.gc();
            return null;
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    public static void populateItemDocuments(CvPrintJob job, String objectStore, String userid) throws CfException {
        logger.info("Enters populateItemDocuments");
        try {
            ArrayList items = job.getPrintJobItems();
            job.setIsProcess(true);
            ArrayList modelDocList = null;
            logger.info("Items size::::::" + items.size());
            for (int i = 0; i < items.size(); i++) {
                modelDocList = new ArrayList();
                CvPrintJobItem x = (CvPrintJobItem) items.get(i);
                ArrayList guidList = x.getGuidList();
                if ((guidList != null) && (!guidList.isEmpty())) {
                    modelDocList.addAll(guidList);
                }
                logger.info("guidList size::::::" + guidList.size());
                CvRenderPayloadRequest cvRenderPayloadRequest = null;
                if ((modelDocList != null) && (!modelDocList.isEmpty())) {
                    cvRenderPayloadRequest = new CvRenderPayloadRequest();
                    logger.info("Before creating CvRenderPayloadRequest");
                    logger.info("Document Class::: " + x.getDocumentClass());
                    cvRenderPayloadRequest.setDocumentClass(x.getDocumentClass());
                    cvRenderPayloadRequest.setGuid(modelDocList);
                    cvRenderPayloadRequest.setUserId(userid);
                    logger.info("After creating the CvRenderPayloadRequest");
                    try {
                        if (cvRenderPayloadRequest != null) {
                            List pdfContents = docmgmtClient.retrieveDocument(cvRenderPayloadRequest.getGuid());
                            if ((pdfContents != null) && (!pdfContents.isEmpty())) {
                                logger.info("PDF contents sizenew::::::::::::::" + pdfContents.size());
                                Iterator pdfItr = pdfContents.iterator();
                                while (pdfItr.hasNext()) {
                                    byte[] contents = (byte[]) pdfItr.next();
                                    CvPrintJobItem item = (CvPrintJobItem) items.get(i);
                                    item.addDocumentList(contents);
                                    int filenetpagecount = 100;
                                    item.setPageCountFromFileNet(filenetpagecount);
                                    logger.info("PageCOunt from Filenet " + filenetpagecount);
                                }
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                        throw new CfException(" Error populating documents" + e);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new CfException(" Error populating documents" + e);
        }
        logger.info("Exits populateItemDocuments");
    }
}
First of all, you are running the application on a Tomcat server. If you want to make a standalone Spring application, you can configure it as below:
@Configuration
public class ApplicationMain {

    @Bean
    public Stackoverflow stackoverflow() {
        return new Stackoverflow();
    }

    public static void main(String[] args) {
        ConfigurableApplicationContext configurableApplicationContext = new AnnotationConfigApplicationContext(ApplicationMain.class);
        System.out.println(configurableApplicationContext.getBean("stackoverflow"));
    }
}
'JVM is not getting shut down after completion' is normal behavior for a Tomcat server, because it keeps waiting for requests to handle.
You can also give a base package, like below:
new AnnotationConfigApplicationContext("com.example");
and it will scan the package for you.

Not getting result: working with CompletableFuture

I am supposed to get the responses from two APIs and only then move forward. To achieve this I tried to use CompletableFuture, but I end up getting a NullPointerException when fetching the response from the 'result' object.
In fact, the CompletableFuture basically has no data.
I am not able to debug the threads' work directly.
public APIResult execute() throws InterruptedException, ExecutionException {
    CompletableFuture<TaskChair> completableFutureChair = CompletableFuture.supplyAsync(() -> new TaskChair(), executorChair);
    CompletableFuture<TaskBottle> completableFutureBottle = CompletableFuture.supplyAsync(() -> new TaskBottle(), executorChair);
    CompletableFuture<Void> combinedFuture = CompletableFuture.allOf(completableFutureChair, completableFutureBottle);
    combinedFuture.get();
    TaskChair taskChair = completableFutureChair.get();
    TaskBottle taskBottle = completableFutureBottle.get();
    List<Chair> chairs = taskChair.getChairs();
    List<Bottle> bottles = taskBottle.getBottles();
    APIResult result = new APIResult(chairs, bottles);
    return result;
}

class TaskChair implements Callable<List<Chair>> {
    List<Chair> chairs;

    public List<Chair> getChairs() {
        return chairs;
    }

    public void setChairs(List<Chair> chairs) {
        this.chairs = chairs;
    }

    @Override
    public List<Chair> call() throws Exception {
        chairs = new RestAPI().getChairs();
        return chairs;
    }
}

public static void main(String[] args) {
    RestService service = new RestService();
    APIResult result = null;
    try {
        result = service.execute();
    } catch (InterruptedException | ExecutionException e) { }
    System.out.println("Chair API Status -> ");
    for (Chair chair : result.getChairs()) {
        System.out.println(" id : " + chair.getId() + " name : " + chair.getName());
    }
}
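The NPE is consistent with the suppliers never running the tasks: supplyAsync(() -> new TaskChair()) merely constructs the object, call() is never invoked, and chairs stays null. Here is a minimal sketch of an execute() that runs the work inside the futures, assuming the question's own RestAPI type also has a getBottles() method (that name is a guess):

public APIResult execute() {
    // Do the actual API work inside the futures instead of just constructing tasks.
    CompletableFuture<List<Chair>> chairs =
            CompletableFuture.supplyAsync(() -> new RestAPI().getChairs(), executorChair);
    CompletableFuture<List<Bottle>> bottles =
            CompletableFuture.supplyAsync(() -> new RestAPI().getBottles(), executorChair);

    // allOf completes when both are done; join() then returns the finished values.
    CompletableFuture.allOf(chairs, bottles).join();
    return new APIResult(chairs.join(), bottles.join());
}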

Using non-blocking code from blocking code to reduce number of threads being used

In the case of blocking APIs, the caller expects the code to return some value,
e.g. response = blockingAPI.execute().
In the case of non-blocking code, we communicate via callbacks.
The overall request (web) pipeline is blocking and sequential; steps depend on the responses of previous steps. In one of the steps we need to call a web service. The current setup assumes this service is called through a blocking API.
How can we call it in a non-blocking way and still return to the pipeline as blocking code does? The main reason for trying a non-blocking call here is to serve more HTTP requests per server by not using a new thread per service call. The intention is not to perform some other task while the IO completes, since the sequence must be maintained. One way could be to get a Future, loop to check whether it is done, and sleep in between, which sounds bad.
We are using Spring and have a custom execution pipeline per request. I was thinking of using async-http-client for the service call.
Sample Sync Client
@Component
public class SyncClient {

    private static final int CONNECTION_TIMEOUT = 1000;
    private static final int CONNECTION_REQUEST_TIMEOUT = 1000;
    private static final int SOCKET_TIMEOUT = 1000;
    private static final String URL = "http://localhost:8080/returnback";
    private static final String RETURN_BACK = "returnBack";

    private final HttpClientBuilder httpClientBuilder;

    public SyncClient() {
        httpClientBuilder = HttpClientBuilder.create();
        httpClientBuilder.useSystemProperties();
        httpClientBuilder.setRedirectStrategy(new LaxRedirectStrategy());
    }

    public Map<String, String> send() throws Exception {
        final CloseableHttpClient closeableHttpClient = httpClientBuilder.build();
        final HttpRequestBase httpRequestBase = new HttpGet(URL);
        httpRequestBase.setConfig(getCustomConfig());
        final String returnBackValue = UUID.randomUUID().toString();
        httpRequestBase.addHeader(new BasicHeader(RETURN_BACK, returnBackValue));
        CloseableHttpResponse response = null;
        final Map<String, String> output = new HashMap<>();
        try {
            response = closeableHttpClient.execute(httpRequestBase);
            if (response == null) {
                throw new RuntimeException("Unexpected null http response");
            }
            output.put(returnBackValue, response.getFirstHeader(RETURN_BACK).getValue());
        } finally {
            HttpClientUtils.closeQuietly(response);
        }
        return output;
    }

    private RequestConfig getCustomConfig() {
        return RequestConfig.copy(RequestConfig.DEFAULT)
                .setConnectTimeout(CONNECTION_TIMEOUT)
                .setSocketTimeout(SOCKET_TIMEOUT)
                .setConnectionRequestTimeout(CONNECTION_REQUEST_TIMEOUT).build();
    }
}
Sample Polling Async Client
@Component
public class PollingAsyncClient {

    private static final String URL = "http://localhost:8080/returnback";
    private static final String RETURN_BACK = "returnBack";
    private static final int CONNECTION_TIMEOUT = 1000;
    private static final int REQUEST_TIMEOUT = 1000;

    private final AsyncHttpClient asyncHttpClient;

    public PollingAsyncClient() {
        final AsyncHttpClientConfig asyncHttpClientConfig = new DefaultAsyncHttpClientConfig.Builder()
                .setMaxConnections(200)
                .setMaxConnectionsPerHost(20)
                .setConnectTimeout(CONNECTION_TIMEOUT).build();
        this.asyncHttpClient = new DefaultAsyncHttpClient(asyncHttpClientConfig);
    }

    public Map<String, String> send() {
        final RequestBuilder requestBuilder = new RequestBuilder("GET");
        final String returnBackValue = UUID.randomUUID().toString();
        // Add the header before build(), otherwise it never makes it into the request.
        requestBuilder.addHeader(RETURN_BACK, returnBackValue);
        final Request request = requestBuilder.setUrl(URL)
                .setRequestTimeout(REQUEST_TIMEOUT)
                .build();
        ListenableFuture<Response> listenableFuture = null;
        final Map<String, String> output = new HashMap<>();
        try {
            listenableFuture = asyncHttpClient.executeRequest(request);
            while (!listenableFuture.isDone()) {
                Thread.sleep(50);
            }
            Response response = listenableFuture.get();
            output.put(returnBackValue, response.getHeader(RETURN_BACK));
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
        return output;
    }
}
Now the consumer can call both clients like this:
Map<String, String> map = null;
try {
    map = client.send();
} catch (Exception e) {
    e.printStackTrace();
}
So to the consumer, both of them are blocking. Per my understanding, the non-blocking client will consume fewer resources (threads), but this sleep loop will eat up CPU. Another way could be to use wait and notify, but that would create a lot of new objects per request. I will try that and update, though.
EDIT (for the comments):
When I said the "sleep loop" will eat up CPU, I meant the while loop will, since the thread keeps waking up and trying again.
EDIT:
Sample Waiting Async Client
public class WaitingAsyncClient {

    private static final String URL = "http://localhost:8080/returnback";
    private static final String RETURN_BACK = "returnBack";
    private static final int CONNECTION_TIMEOUT = 2000;
    private static final int REQUEST_TIMEOUT = 2000;

    private final AsyncHttpClient asyncHttpClient;

    public WaitingAsyncClient() {
        final AsyncHttpClientConfig asyncHttpClientConfig = new DefaultAsyncHttpClientConfig.Builder()
                .setMaxConnections(200)
                .setMaxConnectionsPerHost(20)
                .setConnectTimeout(CONNECTION_TIMEOUT).build();
        this.asyncHttpClient = new DefaultAsyncHttpClient(asyncHttpClientConfig);
    }

    public Map<String, String> send() {
        final Object lock = new Object();
        // Completion flag guards the wait below against the lost-notification race:
        // without it, a notify() fired before wait() would block the caller forever.
        final AtomicBoolean done = new AtomicBoolean(false);
        final RequestBuilder requestBuilder = new RequestBuilder("GET");
        final String returnBackValue = UUID.randomUUID().toString();
        // Add the header before build(), otherwise it never makes it into the request.
        requestBuilder.addHeader(RETURN_BACK, returnBackValue);
        final Request request = requestBuilder.setUrl(URL)
                .setRequestTimeout(REQUEST_TIMEOUT)
                .build();
        ListenableFuture<Response> listenableFuture = null;
        final Map<String, String> output = new HashMap<>();
        try {
            listenableFuture = asyncHttpClient.executeRequest(request, new AsyncHandler<Response>() {
                private final Response.ResponseBuilder builder = new Response.ResponseBuilder();

                @Override
                public void onThrowable(Throwable t) {
                    synchronized (lock) {
                        done.set(true);
                        lock.notify();
                    }
                }

                @Override
                public State onBodyPartReceived(HttpResponseBodyPart bodyPart) throws Exception {
                    builder.accumulate(bodyPart);
                    return State.CONTINUE;
                }

                @Override
                public State onStatusReceived(HttpResponseStatus responseStatus) throws Exception {
                    builder.accumulate(responseStatus);
                    return State.CONTINUE;
                }

                @Override
                public State onHeadersReceived(HttpResponseHeaders headers) throws Exception {
                    builder.accumulate(headers);
                    return State.CONTINUE;
                }

                @Override
                public Response onCompleted() throws Exception {
                    synchronized (lock) {
                        done.set(true);
                        lock.notify();
                    }
                    return builder.build();
                }
            });
            synchronized (lock) {
                while (!done.get()) {
                    lock.wait();
                }
            }
            Response response = listenableFuture.get();
            output.put(returnBackValue, response.getHeader(RETURN_BACK));
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
        return output;
    }
}
Now, I think I need to do a performance check.

Thread does not run

I have some places in an Excel file; each point has a lng and lat coordinate.
Now I try to create a static map for each point using the Google Static Maps API.
I have two components, a parser and a loader.
The parser reads the Excel file, while the loader downloads the tiles.
I make the loader run in a separate thread.
public class Parser {

    private static Parser instance;
    private StaticMapLoader loader;

    private Parser(StaticMapLoader loader) {
        this.loader = loader;
    }

    public synchronized static Parser getInstance(StaticMapLoader loader) {
        if (instance == null) {
            instance = new Parser(loader);
        }
        return instance;
    }

    public List<Branch> parse(String path) {
        List<Branch> result = new ArrayList<Branch>();
        InputStream inp;
        try {
            inp = new FileInputStream(path);
            Workbook wb = WorkbookFactory.create(inp);
            Sheet sheet = wb.getSheetAt(0);
            int rows = sheet.getLastRowNum();
            for (Row r : sheet) { // Sheet is Iterable<Row> in POI
                // r.type, r.name, r.x, r.y stand in for reading the row's cells
                loader.addTask(r.type, r.name, r.x, r.y);
            }
        } catch (InvalidFormatException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Branch bc = new Branch("网点1", null, null);
        return result;
    }
}
Loader:
public class StaticMapLoader extends Thread {

    private final static Logger log = Logger.getLogger(StaticMapLoader.class);

    private List<Task> tasks = new ArrayList<Task>();
    private String tilePath;
    private boolean running = false;

    public StaticMapLoader(String saveDir) {
        this.tilePath = saveDir;
    }

    @Override
    public void run() {
        while (running) {
            log.debug("run " + tasks.size());
            if (tasks.size() > 0) {
                Task t = tasks.get(0);
                if (t != null && t.status == Status.waiting) {
                    tasks.remove(0);
                    t.status = Status.running;
                    downLoad(t);
                }
            }
        }
    }

    private void downLoad(Task t) {
        log.debug(String.format("load data for " + t.toString()));
        // download tiles and save
        t.status = Status.success;
    }

    public void addTask(String type, String name, double x, double y) {
        log.debug(String.format("add task of :%s,%s", type, name));
        tasks.add(new Task(type, name, x, y));
    }

    public void startRunning() {
        running = true;
        this.start();
    }

    public void stopRunning() {
        running = false;
        this.interrupt();
    }

    class Task {
        Status status = Status.waiting;
        String type, name;
        double x, y;

        Task(String type, String name, double x, double y) {
            this.type = type;
            this.name = name;
            this.x = x;
            this.y = y;
        }
    }

    enum Status {
        waiting, running, fail, success
    }
}
The process is rather simple: the StaticMapLoader has an ArrayList field. When the Parser parses a record (place), it is appended to the list, and the loader iterates the list and downloads the data.
However, I meet a strange problem here:
@Override
public void run() {
    while (running) {
        log.debug("run " + tasks.size());
        if (tasks.size() > 0) {
            Task t = tasks.get(0);
            if (t != null && t.status == Status.waiting) {
                tasks.remove(0);
                t.status = Status.running;
                downLoad(t);
            }
        }
    }
}
The above code runs, and I get logs like this:
run 1
add task of ..
run 2
add task of ...
However, if I comment out the log line, downLoad is never called; I get:
run 1
run 2
......
It seems this may be caused by the thread. Am I missing anything?
BTW, the above code runs inside an HttpServlet context, and I start it like this:
@Override
public void init() throws ServletException {
    super.init();
    try {
        URL fileUrl = getServletContext().getResource(getInitParameter("xlsxFile"));
        URL tilePath = getServletContext().getResource(getInitParameter("tilePath"));
        StaticMapLoader loader = new StaticMapLoader(tilePath.getPath());
        loader.startRunning();
        Parser.getInstance(loader).parse(fileUrl.getPath());
    } catch (MalformedURLException e) {
        // ignored
    }
}
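For contrast, here is a minimal sketch of the same producer/consumer hand-off done with a BlockingQueue, which provides the cross-thread visibility guarantees and a blocking take() that the plain ArrayList polling loop above lacks (Task is the question's own class; the other names are illustrative):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class QueueingLoader extends Thread {

    private final BlockingQueue<Task> tasks = new LinkedBlockingQueue<>();

    public void addTask(Task t) {
        tasks.add(t); // safe to call from the parser thread
    }

    @Override
    public void run() {
        try {
            while (!Thread.currentThread().isInterrupted()) {
                Task t = tasks.take(); // blocks until a task arrives; no busy loop
                download(t);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // interrupt() from outside stops us
        }
    }

    private void download(Task t) {
        // download tiles and save, as in the original downLoad(t)
    }
}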

How to access Hibernate session from src folder?

I would like to know how to properly access the services and domain classes in this sample class placed in the src/java folder:
public class NewsIngestion implements Runnable {

    private String str;
    private int num;
    private Logger log = Logger.getLogger("grails.app");
    private static boolean isRunning;
    private Thread t;
    private WorkerJobService jobService;
    private NewsService newsService;

    public NewsIngestion(String s, int n) {
        jobService = new WorkerJobService();
        newsService = new NewsService();
        str = s;
        num = n;
        isRunning = false;
        t = new Thread(this, "NewsIngestion");
    }

    public void run() {
        while (isRunning) {
            try {
                if (jobService.isJobEnabled("ConsumeFeedsJob") && jobService.lockJob("ConsumeFeedsJob")) {
                    log.info("${this.class.name}: ConsumeFeedsJob started");
                    try {
                        // get all sources
                        List sources = (List) InvokerHelper.invokeMethod(RSSFeed.class, "list", null);
                        for (int i = 0; i < sources.size(); i++) {
                            RSSFeed s = (RSSFeed) sources.get(i);
                            // check if it's time to read the source
                            int diff = DateTimeUtil.getSecondsDateDiff(s.getLastChecked(), new Date());
                            if (s.getLastChecked() == null || diff >= s.getCheckInterval()) {
                                List keyword_list = (List) InvokerHelper.invokeMethod(Keyword.class, "list", null);
                                for (int j = 0; j < keyword_list.size(); j++) {
                                    String keyword = (String) keyword_list.get(j);
                                    try {
                                        newsService.ingestNewsFromSources(keyword, s);
                                    } catch (Exception e) {
                                        log.error("${this.class.name}: ${e}");
                                    }
                                    log.debug("Completed reading feeds for ${keyword}.");
                                    log.info("${this.class.name}: Reading feeds for '${keyword}' (${s.feedName}) took ${Float.toString(st2.getDuration())} second(s).");
                                }
                                s.setLastChecked(new Date());
                                InvokerHelper.invokeMethod(RSSFeed.class, "save", null);
                            }
                            log.info("${this.class.name}: Reading feeds for '${s.feedName}' for all keywords took ${Float.toString(st.getDuration())} second(s).");
                        }
                    } catch (Exception e) {
                        log.error("${this.class.name}: Exception: ${e}");
                    }
                    log.info("${this.class.name}: ConsumeFeedsJob ended.");
                    // unlock job
                    jobService.unlockJob("ConsumeFeedsJob");
                }
                log.info("alfred: success");
            } catch (Exception e) {
                log.info("alfred exception: " + e.getMessage());
            }
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                log.info(e.getMessage());
            }
        }
    }

    public void start() {
        if (t == null) {
            t = new Thread(this, "NewsIngestion");
        }
        if (!isRunning) {
            isRunning = true;
            t.start();
        }
    }

    public void stop() {
        isRunning = false;
    }

    public boolean isRunning() {
        return isRunning;
    }
}
I'm encountering this error message:
No Hibernate Session bound to thread, and configuration does not allow creation of non-transactional one here
Thanks.
You shouldn't instantiate the service class yourself; instead, get the instance from the main context:
import org.codehaus.groovy.grails.commons.ApplicationHolder
def ctx = ApplicationHolder.application.mainContext
def newsService = ctx.newsService
If you're using Java:
import org.codehaus.groovy.grails.commons.ApplicationHolder;

public class SomeClass {

    SomeService someService;

    public SomeClass() {
        someService = (SomeService) ApplicationHolder.getApplication().getMainContext().getBean("someService");
    }
}
Consider using Spring and the @Transactional annotation, or AOP.
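A minimal sketch of the @Transactional route (IngestionService and its method are illustrative, not from the question): when the ingestion logic runs inside a Spring-managed transactional service method, a Hibernate session is bound to the thread for the duration of the call, which avoids the "No Hibernate Session bound to thread" error.

import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
public class IngestionService {

    @Transactional // Spring opens a session/transaction around this method
    public void ingest(String keyword) {
        // domain and service access here runs inside the bound Hibernate session
    }
}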
