Check the code below. There is a multithreading problem where the results from each thread get swapped.
We have tried using withWheelColor.isDone() and withWheelColor.isCancelled(), and also wrapping the call in withWheelColor.get(), but no luck.
final Map<String, Object> result = new HashMap<>();
final ExecutorService executor = Executors.newFixedThreadPool(2);
ProductSearchPageData<SearchStateData, ProductData> searchPageData = null;
Future<ProductSearchPageData<SearchStateData, ProductData>> withWheelColor= null;
Future<ProductSearchPageData<SearchStateData, ProductData>> withoutWheelColor = null;
final LinkedHashMap<String, String> sortedParams = getSortedParams(form.getParams(), form.getLastParam());
final PageableData pageableData = createPageableData(0,
getSearchPageSize(), null, ShowMode.Page);
final SearchStateData searchState = new SearchStateData();
searchState.setPdpFlow(Boolean.TRUE);
final SearchQueryData searchQueryData = new SearchQueryData();
String productCode = null;
try {
Callable<ProductSearchPageData<SearchStateData, ProductData>> withWheelColorThread = new DTCallable<ProductSearchPageData<SearchStateData, ProductData>>(
null) {
@Override
public ProductSearchPageData<SearchStateData, ProductData> callMe(
String pk) throws Exception {
buildWithParams(sortedParams, searchState,
searchQueryData, form);
return productSearchFacade.textSearch(searchState,
pageableData);
}
};
withWheelColor = executor.submit(withWheelColorThread);
if (sortedParams.containsKey(WheelProductModel.WHEELCOLOR)
&& sortedParams.size() > 1) {
Callable<ProductSearchPageData<SearchStateData, ProductData>> withoutWheelColorThread = new DTCallable<ProductSearchPageData<SearchStateData, ProductData>>(
null) {
@Override
public ProductSearchPageData<SearchStateData, ProductData> callMe(
String pk) throws Exception {
sortedParams.remove(WheelProductModel.WHEELCOLOR);//Call without wheelColor
buildWithParams(sortedParams, searchState,
searchQueryData, form);
return productSearchFacade.textSearch(searchState,
pageableData);
}
};
withoutWheelColor = executor.submit(withoutWheelColorThread);
}
executor.shutdown();
if (!executor.awaitTermination(120, TimeUnit.SECONDS)) {
if(LOG.isDebugEnabled()) {
LOG.debug("No Result Found!");
}
}
if (null != withoutWheelColor
&& null != withoutWheelColor.get()
&& CollectionUtils.isNotEmpty(withoutWheelColor.get()
.getFacets())) {
for (final FacetData<SearchStateData> facet : withoutWheelColor
.get().getFacets()) {
if (null != facet.getCode() && facet.getCode()
.equals(WheelProductModel.WHEELCOLOR)) {
result.put("availableColors", facet.getValues());
}
}
}
searchPageData = withWheelColor.get();
} catch (final Exception e) {
LOG.error("Error getting PDP results : " + e.getMessage());
}
processResult(request, result, searchPageData, productCode);
return result;
The expectation is that only the withWheelColor result ends up in searchPageData, never the withoutWheelColor result. What is missing in the code above to prevent the swapping?
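A note on where the swap most likely comes from: both Callables capture the same sortedParams, searchState and searchQueryData instances, and the second task removes WHEELCOLOR from the shared map while the first task may still be running buildWithParams, so which parameters each textSearch call sees depends on timing. Below is a minimal, self-contained sketch of the usual fix, giving each task its own copy of the mutable state before submitting; search(...) is only a stand-in for productSearchFacade.textSearch(...), and "wheelColor" stands in for WheelProductModel.WHEELCOLOR.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
public class PerTaskStateDemo {
    public static void main(String[] args) throws Exception {
        final ExecutorService executor = Executors.newFixedThreadPool(2);
        final LinkedHashMap<String, String> sortedParams = new LinkedHashMap<>();
        sortedParams.put("wheelColor", "black");
        sortedParams.put("model", "GT");
        // Defensive copies: neither task can affect what the other one searches for.
        final Map<String, String> paramsWithColor = new LinkedHashMap<>(sortedParams);
        final Map<String, String> paramsWithoutColor = new LinkedHashMap<>(sortedParams);
        paramsWithoutColor.remove("wheelColor");
        final Future<List<String>> withWheelColor = executor.submit(() -> search(paramsWithColor));
        final Future<List<String>> withoutWheelColor = executor.submit(() -> search(paramsWithoutColor));
        System.out.println("with color:    " + withWheelColor.get());
        System.out.println("without color: " + withoutWheelColor.get());
        executor.shutdown();
    }
    // Stand-in for the facade call; it only echoes the parameters it was given.
    private static List<String> search(Map<String, String> params) {
        return new ArrayList<>(Arrays.asList("results for " + params));
    }
}
Applied to the original code, that would mean building one LinkedHashMap copy (and a fresh SearchStateData/SearchQueryData) per Callable inside callMe(), rather than sharing and mutating the outer instances.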
I want to know some of the reasons that can cause the exception below. I am unable to find the text "Cannot find message" in jsch-0.1.54.jar. There are some straightforward messages, such as "file not found", that make sense, but I need more information about this one so that I can get to the root cause.
SftpException while running get ---> 2: Cannot find message [/destination/file.txt]
at com.jcraft.jsch.ChannelSftp.throwStatusError(ChannelSftp.java:2289)
at com.jcraft.jsch.ChannelSftp._stat(ChannelSftp.java:1741)
at com.jcraft.jsch.ChannelSftp._stat(ChannelSftp.java:1758)
at com.jcraft.jsch.ChannelSftp.get(ChannelSftp.java:786)
at com.jcraft.jsch.ChannelSftp.get(ChannelSftp.java:750)
at com.iyi.ftp.SFTP.get(SFTP.java:99)
Here is my calling method.
public boolean get(final String remoteFile, final String localFile) throws JSchException {
Vector connection = null;
Session session = null;
ChannelSftp c = null;
boolean status = false;
try {
connection = this.connect();
session = (Session) connection.get(0);
c = (ChannelSftp) connection.get(1);
c.get(remoteFile, localFile);
status = true;
}
catch (JSchException e) {
SFTP.LGR.warn((Object)("JSchException in SFTP::get() ---> " + FTPFactory.getStackTrace((Throwable)e)));
throw e;
}
catch (SftpException e2) {
SFTP.LGR.warn((Object)("SftpException while running get ---> " + FTPFactory.getStackTrace((Throwable)e2)));
throw new JSchException(e2.getMessage());
}
catch (CredentialDecryptionException e3) {
SFTP.LGR.error((Object)"##CredentialDecryptionException##", (Throwable)e3);
throw new JSchException(e3.getMessage(), (Throwable)e3);
}
finally {
if (c != null) {
c.quit();
}
if (session != null) {
session.disconnect();
}
}
return status;
}
These methods are taken from jsch-0.1.54.jar, which is an open-source utility.
public void get(String src, String dst, final SftpProgressMonitor monitor, final int mode) throws SftpException {
boolean _dstExist = false;
String _dst = null;
try {
((MyPipedInputStream)this.io_in).updateReadSide();
src = this.remoteAbsolutePath(src);
dst = this.localAbsolutePath(dst);
final Vector v = this.glob_remote(src);
final int vsize = v.size();
if (vsize == 0) {
throw new SftpException(2, "No such file");
}
final File dstFile = new File(dst);
final boolean isDstDir = dstFile.isDirectory();
StringBuffer dstsb = null;
if (isDstDir) {
if (!dst.endsWith(ChannelSftp.file_separator)) {
dst += ChannelSftp.file_separator;
}
dstsb = new StringBuffer(dst);
}
else if (vsize > 1) {
throw new SftpException(4, "Copying multiple files, but destination is missing or a file.");
}
for (int j = 0; j < vsize; ++j) {
final String _src = v.elementAt(j);
final SftpATTRS attr = this._stat(_src);
if (attr.isDir()) {
throw new SftpException(4, "not supported to get directory " + _src);
}
_dst = null;
if (isDstDir) {
final int i = _src.lastIndexOf(47);
if (i == -1) {
dstsb.append(_src);
}
else {
dstsb.append(_src.substring(i + 1));
}
_dst = dstsb.toString();
if (_dst.indexOf("..") != -1) {
final String dstc = new File(dst).getCanonicalPath();
final String _dstc = new File(_dst).getCanonicalPath();
if (_dstc.length() <= dstc.length() || !_dstc.substring(0, dstc.length() + 1).equals(dstc + ChannelSftp.file_separator)) {
throw new SftpException(4, "writing to an unexpected file " + _src);
}
}
dstsb.delete(dst.length(), _dst.length());
}
else {
_dst = dst;
}
final File _dstFile = new File(_dst);
if (mode == 1) {
final long size_of_src = attr.getSize();
final long size_of_dst = _dstFile.length();
if (size_of_dst > size_of_src) {
throw new SftpException(4, "failed to resume for " + _dst);
}
if (size_of_dst == size_of_src) {
return;
}
}
if (monitor != null) {
monitor.init(1, _src, _dst, attr.getSize());
if (mode == 1) {
monitor.count(_dstFile.length());
}
}
FileOutputStream fos = null;
_dstExist = _dstFile.exists();
try {
if (mode == 0) {
fos = new FileOutputStream(_dst);
}
else {
fos = new FileOutputStream(_dst, true);
}
this._get(_src, fos, monitor, mode, new File(_dst).length());
}
finally {
if (fos != null) {
fos.close();
}
}
}
}
catch (Exception e) {
if (!_dstExist && _dst != null) {
final File _dstFile2 = new File(_dst);
if (_dstFile2.exists() && _dstFile2.length() == 0L) {
_dstFile2.delete();
}
}
if (e instanceof SftpException) {
throw (SftpException)e;
}
if (e instanceof Throwable) {
throw new SftpException(4, "", e);
}
throw new SftpException(4, "");
}
}
private SftpATTRS _stat(final byte[] path) throws SftpException {
try {
this.sendSTAT(path);
Header header = new Header();
header = this.header(this.buf, header);
final int length = header.length;
final int type = header.type;
this.fill(this.buf, length);
if (type != 105) {
if (type == 101) {
final int i = this.buf.getInt();
this.throwStatusError(this.buf, i);
}
throw new SftpException(4, "");
}
final SftpATTRS attr = SftpATTRS.getATTR(this.buf);
return attr;
}
catch (Exception e) {
if (e instanceof SftpException) {
throw (SftpException)e;
}
if (e instanceof Throwable) {
throw new SftpException(4, "", e);
}
throw new SftpException(4, "");
}
}
The error message comes from your server. It is indeed a quite strange message, but I assume it is some custom SFTP server that deals with "messages" rather than plain files.
So the message basically translates to the "Cannot find file" error of a traditional SFTP server. Even the error code 2 (SSH_FX_NO_SUCH_FILE) supports that.
Your path in remoteFile is probably wrong.
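If it helps to confirm that, here is a minimal sketch (host, credentials and the directory are placeholders) that uses the same library to list the remote directory before calling get, so you can see exactly which paths the server actually exposes:
import com.jcraft.jsch.*;
import java.util.Vector;
public class RemotePathCheck {
    public static void main(String[] args) throws Exception {
        JSch jsch = new JSch();
        Session session = jsch.getSession("user", "sftp.example.com", 22); // placeholder credentials
        session.setPassword("password");
        session.setConfig("StrictHostKeyChecking", "no");                  // for a quick test only
        session.connect();
        ChannelSftp sftp = (ChannelSftp) session.openChannel("sftp");
        sftp.connect();
        // List the parent directory of the file you are trying to download,
        // to verify the exact remote path and spelling.
        Vector<?> entries = sftp.ls("/destination");
        for (Object e : entries) {
            ChannelSftp.LsEntry entry = (ChannelSftp.LsEntry) e;
            System.out.println(entry.getFilename() + "  (size " + entry.getAttrs().getSize() + ")");
        }
        sftp.disconnect();
        session.disconnect();
    }
}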
I'd like to use Zuul to cache some requests. The cache is stored in Redis as a POJO and contains plain text (not gzip-compressed data).
For normal tests and integration tests everything works pretty well. With a JMeter load test, some of the requests fail with
java.util.zip.ZipException: Not in GZIP format (reported by JMeter)
We figured out that at this point Zuul is returning an empty response.
My PreFilter:
public class CachePreFilter extends CacheBaseFilter {
private static DynamicIntProperty INITIAL_STREAM_BUFFER_SIZE = DynamicPropertyFactory.getInstance().getIntProperty(ZuulConstants.ZUUL_INITIAL_STREAM_BUFFER_SIZE, 8192);
@Autowired
CounterService counterService;
public CachePreFilter(RedisCacheManager redisCacheManager, Properties properties) {
super(redisCacheManager, properties);
}
@Override
public Object run() {
RequestContext ctx = RequestContext.getCurrentContext();
CachedResponse data = getFromCache(ctx);
if (null != data) {
counterService.increment("counter.cached");
HttpServletResponse response = ctx.getResponse();
response.addHeader("X-Cache", "HIT");
if (null != data.getContentType()) {
response.setContentType(data.getContentType());
}
if (null != data.getHeaders()) {
for (Entry<String, String> header : data.getHeaders().entrySet()) {
if (!response.containsHeader(header.getKey())) {
response.addHeader(header.getKey(), header.getValue());
}
}
}
OutputStream outStream = null;
try {
outStream = response.getOutputStream();
boolean isGzipRequested = ctx.isGzipRequested();
if (null != data.getBody()) {
final String requestEncoding = ctx.getRequest().getHeader(ZuulHeaders.ACCEPT_ENCODING);
if (requestEncoding != null && HTTPRequestUtils.getInstance().isGzipped(requestEncoding)) {
isGzipRequested = true;
}
ByteArrayOutputStream byteArrayOutputStream = null;
ByteArrayInputStream is = null;
try {
if (isGzipRequested) {
byteArrayOutputStream = new ByteArrayOutputStream();
GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream);
gzipOutputStream.write(data.getBody().getBytes(StandardCharsets.UTF_8));
gzipOutputStream.flush();
gzipOutputStream.close();
ctx.setResponseGZipped(true);
is = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
logger.debug(String.format("Send gzip content %s", data.getBody()));
response.setHeader(ZuulHeaders.CONTENT_ENCODING, "gzip");
} else {
logger.debug(String.format("Send content %s", data.getBody()));
is = new ByteArrayInputStream(data.getBody().getBytes(StandardCharsets.UTF_8));
}
writeResponse(is, outStream);
} catch (Exception e) {
logger.error("Error at sending response " + e.getMessage(), e);
throw new RuntimeException("Failed to send content", e);
} finally {
if (null != byteArrayOutputStream) {
byteArrayOutputStream.close();
}
if (null != is) {
is.close();
}
}
}
ctx.setSendZuulResponse(false);
} catch (IOException e) {
logger.error("Cannot read from Stream " + e.getMessage(), e.getMessage());
} finally {
// don't close the outputstream
}
ctx.set(CACHE_HIT, true);
return data;
} else {
counterService.increment("counter.notcached");
}
ctx.set(CACHE_HIT, false);
return null;
}
private ThreadLocal<byte[]> buffers = new ThreadLocal<byte[]>() {
@Override
protected byte[] initialValue() {
return new byte[INITIAL_STREAM_BUFFER_SIZE.get()];
}
};
private void writeResponse(InputStream zin, OutputStream out) throws Exception {
byte[] bytes = buffers.get();
int bytesRead = -1;
while ((bytesRead = zin.read(bytes)) != -1) {
out.write(bytes, 0, bytesRead);
}
}
@Override
public int filterOrder() {
return 99;
}
@Override
public String filterType() {
return "pre";
}
}
My PostFilter:
public class CachePostFilter extends CacheBaseFilter {
public CachePostFilter(RedisCacheManager redisCacheManager, Properties properties) {
super(redisCacheManager, properties);
}
@Override
public boolean shouldFilter() {
RequestContext ctx = RequestContext.getCurrentContext();
return super.shouldFilter() && !ctx.getBoolean(CACHE_HIT);
}
@Override
public Object run() {
RequestContext ctx = RequestContext.getCurrentContext();
HttpServletRequest req = ctx.getRequest();
HttpServletResponse res = ctx.getResponse();
if (isSuccess(res, ctx.getOriginResponseHeaders())) {
// Store only successful responses
String cacheKey = cacheKey(req);
if (cacheKey != null) {
String body = null;
if (null != ctx.getResponseBody()) {
body = ctx.getResponseBody();
} else if (null != ctx.getResponseDataStream()) {
InputStream is = null;
try {
is = ctx.getResponseDataStream();
final Long len = ctx.getOriginContentLength();
if (len == null || len > 0) {
if (ctx.getResponseGZipped()) {
is = new GZIPInputStream(is);
}
StringWriter writer = new StringWriter();
IOUtils.copy(is, writer, "UTF-8");
body = writer.toString();
if (null != body && !body.isEmpty()) {
ctx.setResponseDataStream(new ByteArrayInputStream(body.getBytes()));
ctx.setResponseGZipped(false);
ctx.setOriginContentLength(String.valueOf(body.getBytes().length));
} else {
ctx.setResponseBody("{}");
}
}
} catch (IOException e) {
logger.error("Cannot read body " + e.getMessage(), e);
} finally {
if (null != is) {
try {
is.close();
} catch (IOException e) {
}
}
}
saveToCache(ctx, cacheKey, body);
}
}
}
return null;
}
@Override
public int filterOrder() {
return 1;
}
@Override
public String filterType() {
return "post";
}
private boolean isSuccess(HttpServletResponse res, List<Pair<String, String>> originHeaders) {
if (res != null && res.getStatus() < 300) {
if (null != originHeaders) {
for (Pair<String, String> header : originHeaders) {
if (header.first().equals("X-CACHEABLE") && header.second().equals("1")) {
return true;
}
}
}
}
return false;
}
}
We tested it without Redis (just storing the data in a local variable) and the behaviour is still the same. We always logged the response taken from the cache (before gzipping) and everything looks good.
(Posted on behalf of the question author).
Solution
We refactored our PostFilter so that it no longer changes so much in the Zuul response. After this change we don't see any problems any more:
Working Post Filter
public class CachePostFilter extends CacheBaseFilter {
public CachePostFilter(RedisCacheManager redisCacheManager, Properties properties) {
super(redisCacheManager, properties);
}
@Override
public boolean shouldFilter() {
RequestContext ctx = RequestContext.getCurrentContext();
return super.shouldFilter() && !ctx.getBoolean(CACHE_HIT);
}
@Override
public Object run() {
RequestContext ctx = RequestContext.getCurrentContext();
HttpServletRequest req = ctx.getRequest();
HttpServletResponse res = ctx.getResponse();
if (isSuccess(res, ctx.getOriginResponseHeaders())) {
// Store only successful responses
String cacheKey = cacheKey(req);
if (cacheKey != null) {
String body = null;
if (null != ctx.getResponseBody()) {
body = ctx.getResponseBody();
} else if (null != ctx.getResponseDataStream()) {
InputStream rawInputStream = null;
InputStream gzipByteArrayInputStream = null;
try {
rawInputStream = ctx.getResponseDataStream();
gzipByteArrayInputStream = null;
// If origin tell it's GZipped but the content is ZERO
// bytes,
// don't try to uncompress
final Long len = ctx.getOriginContentLength();
if (len == null || len > 0) {
byte[] rawData = IOUtils.toByteArray(rawInputStream);
ctx.setResponseDataStream(new ByteArrayInputStream(rawData));
if (ctx.getResponseGZipped()) {
gzipByteArrayInputStream = new GZIPInputStream(new ByteArrayInputStream(rawData));
} else {
gzipByteArrayInputStream = new ByteArrayInputStream(rawData);
}
StringWriter writer = new StringWriter();
IOUtils.copy(gzipByteArrayInputStream, writer, "UTF-8");
body = writer.toString();
}
} catch (IOException e) {
logger.error("Cannot read body " + e.getMessage(), e);
} finally {
if (null != rawInputStream) {
try {
rawInputStream.close();
} catch (IOException e) {
}
}
if (null != gzipByteArrayInputStream) {
try {
gzipByteArrayInputStream.close();
} catch (IOException e) {
}
}
}
// if we read from the stream, the other filter cannot read
// and they dont' deliver any response
// ctx.setResponseBody(body);
// ctx.setResponseGZipped(false);
saveToCache(ctx, cacheKey, body);
}
}
}
return null;
}
@Override
public int filterOrder() {
return 1;
}
@Override
public String filterType() {
return "post";
}
private boolean isSuccess(HttpServletResponse res, List<Pair<String, String>> originHeaders) {
if (res != null && res.getStatus() == 200) {
if (null != originHeaders) {
for (Pair<String, String> header : originHeaders) {
if (header.first().equals("X-CACHEABLE") && header.second().equals("1")) {
return true;
}
}
}
}
return false;
}
}
I have a huge CSV file with ~800K records, and using that data I have to form a POST request and make a REST call for each record.
Initially I tried WITHOUT using threads, but it takes a very long time to process.
So I thought of using threads to speed up the process and followed the approach below:
Divided the file into relatively smaller files (tried with 3 files of approx. ~5K records each; I did this manually before passing them to the application).
Created 3 threads (the traditional way, by extending the Thread class).
Read each file with its own thread, formed a HashMap with the details required to build the request, and added it to an ArrayList of POST requests.
Sent the POST requests in a loop.
Got the response and added it to a response list.
Both of the above approaches take too long to even get halfway through (>3 hrs). Maybe one of them is simply not a good approach.
Could anyone please suggest something that would help me speed up the process? It is NOT mandatory to use threading.
Just to add that my code is at the consumer end and does not have much control over the producer end.
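For reference, here is a minimal, self-contained sketch of an ExecutorService-based variant of the above: one bounded thread pool and one task per record, instead of one thread per file. The endpoint URL and JSON payload are placeholders, not the real API; my current code follows below.
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
public class ParallelPoster {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(16);   // tune to what the API tolerates
        List<Future<String>> responses = new ArrayList<>();
        try (BufferedReader reader = Files.newBufferedReader(Paths.get("records.csv"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                final String record = line;
                responses.add(pool.submit(() -> sendPost(record)));  // queue work, don't spawn a thread per file
            }
        }
        for (Future<String> f : responses) {
            System.out.println(f.get());                             // collect results in submission order
        }
        pool.shutdown();
    }
    // Placeholder POST call; replace the URL and body building with the real request.
    static String sendPost(String record) throws IOException {
        HttpURLConnection conn = (HttpURLConnection) new URL("https://example.com/api/brands").openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/json");
        try (OutputStream out = conn.getOutputStream()) {
            out.write(("{\"record\":\"" + record + "\"}").getBytes(StandardCharsets.UTF_8));
        }
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            StringBuilder sb = new StringBuilder();
            for (String l; (l = in.readLine()) != null; ) sb.append(l);
            return sb.toString();
        }
    }
}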
MultiThreadedFileRead (Main class that Extends Thread)
class MultiThreadedFileRead extends Thread
{
public static final String EMPTY = "";
public static Logger logger = Logger.getLogger(MultiThreadedFileRead.class);
BufferedReader bReader = null;
String filename = null;
int Request_Num = 0;
HashMap<Integer, String> filteredSrcDataMap = null;
List<BrandRQ> brandRQList = null;
List<BrandRS> brandRSList = null;
ReadDLShipFile readDLShipFile = new ReadDLShipFile();
GenerateAndProcessPostBrandAPI generateAndProcessPostBrandAPI = new GenerateAndProcessPostBrandAPI();
MultiThreadedFileRead()
{
}
MultiThreadedFileRead(String fname) throws Exception
{
filename = fname;
Request_Num = 0;
brandRQList = new ArrayList<BrandRQ>();
brandRSList = new ArrayList<BrandRS>();
this.start();
}
public void run()
{
logger.debug("File *** : " + this.filename);
//Generate Source Data Map
HashMap<Integer, String> filteredSrcDataMap = (HashMap<Integer, String>) readDLShipFile.readSourceFileData(this.filename);
generateAndProcessPostBrandAPI.generateAndProcessPostRequest(filteredSrcDataMap, this);
}
public static void main(String args[]) throws Exception
{
String environment = ""; // TO BE UNCOMMENTED
String outPutfileName = ""; // TO BE UNCOMMENTED
BrandRetrivalProperties brProps = BrandRetrivalProperties.getInstance();
if(args != null && args.length == 2 && DLPremiumUtil.isNotEmpty(args[0]) && DLPremiumUtil.isNotEmpty(args[1])) // TO BE UNCOMMENTED
{
environment = args[0].trim();
outPutfileName = args[1].trim();
ApplicationInitializer applicationInitializer = new ApplicationInitializer(environment);
applicationInitializer.initializeParams();
logger.debug("Environment : " + environment);
logger.debug("Filename : " + outPutfileName);
// TO BE UNCOMMENTED - START
}else{
System.out.println("Invalid parameters passed. Expected paramenters: Environment");
System.out.println("Sample Request: java -jar BrandRetrival.jar [prd|int] brand.dat");
System.exit(1);
}
MultiThreadedFileRead multiThreadedFileRead = new MultiThreadedFileRead();
List<String> listOfFileNames = multiThreadedFileRead.getFileNames(brProps);
logger.debug("List : " + listOfFileNames);
int totalFileSize = listOfFileNames.size();
logger.debug("totalFileSize : " + totalFileSize);
MultiThreadedFileRead fr[]=new MultiThreadedFileRead[totalFileSize];
logger.debug("Size of Array : " + fr.length);
for(int i = 0; i < totalFileSize; i++)
{
logger.debug("\nFile Getting Added to Array : " + brProps.getCountFilePath()+listOfFileNames.get(i));
fr[i]=new MultiThreadedFileRead(brProps.getCountFilePath()+listOfFileNames.get(i));
}
}
public List<String> getFileNames(BrandRetrivalProperties brProps)
{
MultiThreadedFileRead multiThreadedFileRead1 = new MultiThreadedFileRead();
BufferedReader br = null;
String line = "";
String filename = multiThreadedFileRead1.getCounterFileName(brProps.getCountFilePath(), brProps.getCountFilePathStartsWith());
List<String> fileNameList = null;
logger.debug("filename : " + filename);
if(filename != null)
{
fileNameList = new ArrayList<String>();
try{
br = new BufferedReader(new FileReader(brProps.getCountFilePath()+filename));
while ((line = br.readLine()) != null)
{
fileNameList.add(line);
}
} catch (FileNotFoundException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (IOException e2) {
// TODO Auto-generated catch block ;
e2.printStackTrace();
} catch (Exception e3) {
// TODO Auto-generated catch block
e3.printStackTrace();
} finally
{
if (br != null)
{
try
{
br.close();
} catch (IOException e4)
{
e4.printStackTrace();
} catch (Exception e5)
{
e5.printStackTrace();
}
}
}
}
return fileNameList;
}
// Get the Name of the file which has name of individual CSV files to read to form the request
public String getCounterFileName(String sourceFilePath, String sourceFileNameStartsWith)
{
String fileName = null;
if(sourceFilePath != null && !sourceFilePath.equals(EMPTY) &&
sourceFileNameStartsWith != null && !sourceFileNameStartsWith.equals(EMPTY))
{
File filePath = new File(sourceFilePath);
File[] listOfFiles = filePath.listFiles();
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isFile() && listOfFiles[i].getName().startsWith(sourceFileNameStartsWith)) {
fileName = listOfFiles[i].getName();
} else if (listOfFiles[i].isDirectory()) {
fileName = null;
}
}
}
return fileName;
}
}
GenerateAndProcessPostBrandAPI
public class GenerateAndProcessPostBrandAPI {
public static Logger logger = Logger.getLogger(GenerateAndProcessPostBrandAPI.class);
List<BrandRQ> brandRQList = new ArrayList<BrandRQ>();
List<BrandRS> brandRSList = new ArrayList<BrandRS>();
DLPremiumClientPost dlPremiumclientPost = new DLPremiumClientPost();
JSONRequestGenerator jsonRequestGenerator = new JSONRequestGenerator();
public List<BrandRQ> getBrandRequstList(Map<Integer, String> filteredSrcDataMap)
{
if(filteredSrcDataMap != null)
{
brandRQList = jsonRequestGenerator.getBrandRQList(filteredSrcDataMap, brandRQList);
}
return brandRQList;
}
public void generateAndProcessPostRequest(Map<Integer, String> filteredSrcDataMap, MultiThreadedFileRead multiThreadedFileRead)
{
if(filteredSrcDataMap != null)
{
brandRQList = jsonRequestGenerator.getBrandRQList(filteredSrcDataMap, brandRQList);
if(brandRQList != null && brandRQList.size() > 0)
{
BrandRetrivalProperties brProps = BrandRetrivalProperties.getInstance();
String postBrandsEndpoint = brProps.getPostBrandsEndPoint();
for (BrandRQ brandRQ : brandRQList)
{
multiThreadedFileRead.Request_Num = multiThreadedFileRead.Request_Num + 1;
logger.debug("Brand Request - " + multiThreadedFileRead.Request_Num + " : " + ObjectToJSONCovertor.converToJSON(brandRQ));
BrandRS brandResponse = dlPremiumclientPost.loadBrandApiResponseJSON(brandRQ, postBrandsEndpoint, DLPremiumUtil.getTransactionID(), BrandConstants.AUTH_TOKEN);
logger.debug("Response - " + multiThreadedFileRead.Request_Num + " : " + ObjectToJSONCovertor.converToJSON(brandResponse));
brandRSList.add(brandResponse);
}
}
}
}
}
I am developing a voice-based app in Android and facing some problems; please see the code below.
Java File 1
file = .wav file
public static AudioInputStream getAudioInputStream(File file) throws UnsupportedAudioFileException, IOException {
return getAudioInputStreamImpl(file);
}
private static AudioInputStream getAudioInputStreamImpl(Object source) throws UnsupportedAudioFileException, IOException {
GetAudioInputStreamAudioFileReaderAction action = new GetAudioInputStreamAudioFileReaderAction(source);
doAudioFileReaderIteration(action);
AudioInputStream audioInputStream = action.getAudioInputStream();
if (audioInputStream != null) {
return audioInputStream;
}
throw new UnsupportedAudioFileException("format not supported");
}
private static void doAudioFileReaderIteration(AudioFileReaderAction action) throws IOException {
Iterator audioFileReaders = TAudioConfig.getAudioFileReaders();
boolean bCompleted = false;
while (audioFileReaders.hasNext() && !bCompleted) {
AudioFileReader audioFileReader = (AudioFileReader) audioFileReaders.next();
bCompleted = action.handleAudioFileReader(audioFileReader);
}
}
Java File 2 (TAudioConfig)
public static synchronized Iterator<AudioFileReader> getAudioFileReaders() {
Iterator<AudioFileReader> it;
synchronized (TAudioConfig.class) {
it = getAudioFileReadersImpl().iterator();
}
return it;
}
private static synchronized Set<AudioFileReader> getAudioFileReadersImpl() {
Set<AudioFileReader> set;
synchronized (TAudioConfig.class) {
if (sm_audioFileReaders == null) {
sm_audioFileReaders = new ArraySet();
registerAudioFileReaders();
}
set = sm_audioFileReaders;
}
return set;
}
private static void registerAudioFileReaders() {
TInit.registerClasses(AudioFileReader.class, new C00001());
}
Java File 3 (TInit)
public static void registerClasses(Class providerClass, ProviderRegistrationAction action) {
Iterator providers = Service.providers(providerClass);
if (providers != null) {
while (providers.hasNext()) {
try {
action.register(providers.next());
} catch (Throwable e) {
}
}
}
}
Java File 4 (Service)
public static Iterator<?> providers(Class<?> cls) {
String strFullName = "com/example/voiceautomator/AudioFileReader.class";
Iterator<?> iterator = createInstancesList(strFullName).iterator();
return iterator;
}
private static List<Object> createInstancesList(String strFullName) {
List<Object> providers = new ArrayList<Object>();
Iterator<?> classNames = createClassNames(strFullName);
if (classNames != null) {
while (classNames.hasNext()) {
String strClassName = (String) classNames.next();
try {
Class<?> cls = Class.forName(strClassName, REVERSE_ORDER, ClassLoader.getSystemClassLoader());
providers.add(0, cls.newInstance());
} catch (Throwable e) {
}
}
}
return providers;
}
private static Iterator<String> createClassNames(String strFullName) {
Set<String> providers = new ArraySet<String>();
Enumeration<?> configs = null;
try {
configs = Service.class.getClassLoader().getSystemResources(strFullName);
} catch (Throwable e) {
}
if (configs != null) {
while (configs.hasMoreElements()) {
URL configFileUrl = (URL) configs.nextElement();
InputStream input = null;
try {
input = configFileUrl.openStream();
} catch (Throwable e2) {
}
if (input != null) {
BufferedReader reader = new BufferedReader(new InputStreamReader(input));
try {
for (String strLine = reader.readLine(); strLine != null; strLine = reader.readLine()) {
strLine = strLine.trim();
int nPos = strLine.indexOf(35);
if (nPos >= 0) {
strLine = strLine.substring(0, nPos);
}
if (strLine.length() > 0) {
providers.add(strLine);
}
}
} catch (Throwable e22) {
}
}
}
}
Iterator<String> iterator = providers.iterator();
return iterator;
}
getClassLoader().getSystemResources in Java File 4 (Service) gives me a TwoEnumerationsInOne, and configs.hasMoreElements() returns false, so it never enters the while loop.
AudioFileReader.java is included in the package.
Please guide me on how to resolve this issue.
Please don't forget that I am working on this code in an Android project.
Please see the value of configs here
http://capsicumtech.in/Screenshot_1.png
Thanks in advance.
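One possible lead (an assumption, not a confirmed fix): on Android the system class loader generally cannot see resources packaged inside the APK, so ClassLoader.getSystemResources(...) can come back empty even when the entry exists. Below is a minimal sketch that resolves the resource through the class's own class loader instead; the resource name is a placeholder, and a typical service-provider config would live under META-INF/services/.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.Enumeration;
public class ResourceLookup {
    public static void main(String[] args) throws Exception {
        // Placeholder resource name; adjust to where the provider list actually lives in the APK.
        String name = "META-INF/services/com.example.voiceautomator.AudioFileReader";
        Enumeration<URL> configs = ResourceLookup.class.getClassLoader().getResources(name);
        while (configs.hasMoreElements()) {
            URL url = configs.nextElement();
            System.out.println("Found config: " + url);
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(url.openStream()))) {
                for (String line; (line = reader.readLine()) != null; ) {
                    System.out.println("  provider: " + line.trim());
                }
            }
        }
    }
}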
How can we do a full-text search with a collectionId on Oracle UCM? Is it possible to do a full-text search that recursively starts from a given collectionId parameter?
I did some trials (you can take a look below), but when I test with collectionId, no results are returned.
public List<UCMDocumentTemplate> fullTextSearchByFolderId(@WebParam(name = "searchCriteria")
String paramSearchCriteria, @WebParam(name = "ucmFolderId")
Long ucmFolderId) throws UCMDocumentSearchException
{
List<UCMDocumentTemplate> documentTemplateList = new ArrayList<UCMDocumentTemplate>();
String documentSearchCriteria = "";
try
{
if (ucmFolderId != null)
documentSearchCriteria = "xCollectionID <= <qsch>" + ucmFolderId + "</qsch> <AND>";
documentSearchCriteria += "dDocFullText <substring> <qsch>" + paramSearchCriteria + "</qsch>";
List<Properties> childDocumentList = UCM_API.fullTextSearch(documentSearchCriteria);
UCMDocumentTemplate ucmDocumentTemplate = null;
if (childDocumentList != null)
for (Properties properties : childDocumentList)
{
ucmDocumentTemplate = transformToUCMDocumentTemplate(new UCMDocumentTemplate(), properties);
documentTemplateList.add(ucmDocumentTemplate);
}
}
catch (Exception e)
{
UCMDocumentSearchException exc = new UCMDocumentSearchException(documentSearchCriteria, e);
System.err.println(exc.getCompleteCode());
e.printStackTrace();
throw exc;
}
return documentTemplateList;
}
public static List<Properties> fullTextSearch(String searchCriteria) throws Exception
{
List<Properties> resultList = null;
List<Field> fields = null;
Properties responseProperties = null;
Properties inputBinderProperties = new Properties();
inputBinderProperties.put("IdcService", "GET_SEARCH_RESULTS");
inputBinderProperties.put("QueryText", searchCriteria);
inputBinderProperties.put("SearchEngineName", "databasefulltext");
inputBinderProperties.put("ResultCount", "500");
DataBinder responseBinder = getExecutedResponseBinder(userName, inputBinderProperties);
DataResultSet resultSet = responseBinder.getResultSet("SearchResults");
fields = resultSet.getFields();
resultList = new ArrayList<Properties>();
for (DataObject dataObject : resultSet.getRows())
{
responseProperties = new Properties();
for (Field field : fields)
{
if (field.getType() == Field.Type.DATE && dataObject.getDate(field.getName()) != null)
responseProperties.put(field.getName(), dataObject.getDate(field.getName()));
else
responseProperties.put(field.getName(), dataObject.get(field.getName()));
}
resultList.add(responseProperties);
}
return resultList;
}
I found a solution. When adding the following parameter to inputBinderProperties, it works properly:
inputBinderProperties.put("folderChildren", ucmFolderId);