How to take a picture with an IP camera using Android - Java

Hi, I have created an Android application using an IP camera; for that I have used an MJPEG class.
Main class
package com.example.mjpeg;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import com.example.mjpeg.MjpegInputStream;
import com.example.mjpeg.MjpegView;
public class MainActivity extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://192.168.2.1/?action=appletvstream";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegInputStream class
package com.example.mjpeg;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSequence(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
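// Reads one JPEG frame from the stream: mark the stream, scan for the SOI
// marker (0xFF 0xD8) to measure the header length, take the frame length from
// the Content-Length header when present (otherwise scan ahead to the EOI
// marker 0xFF 0xD9), then read exactly that many bytes and decode a Bitmap.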
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSequence(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
MjpegView class
package com.example.mjpeg;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
I got this code from a blog. Now I want to take a picture when I click a button and save it to the SD card. Can anyone help me, please?
Currently I can only view the video from the IP camera; I need to add a button to this class so that clicking it captures an image and saves it to the SD card.

You have a public Bitmap readMjpegFrame() method right there in MjpegInputStream.
Take the bitmap returned by that method and save it to the SD card when the button is clicked.
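A minimal sketch of that idea (not the original author's code): it assumes the Activity keeps the MjpegInputStream returned by MjpegInputStream.read(URL), that a button is wired to call capture(), that the WRITE_EXTERNAL_STORAGE permission is granted, and that the matching imports are added. Note that the playback thread reads from the same stream, so in a real app you would pause playback first or have MjpegView expose its last decoded bitmap.
private MjpegInputStream stream; // keep the value returned by MjpegInputStream.read(URL)
private void capture() {
    // Never block the UI thread with file I/O.
    new Thread(new Runnable() {
        public void run() {
            try {
                Bitmap frame = stream.readMjpegFrame(); // grab one decoded frame
                File dir = new File(Environment.getExternalStorageDirectory(), "ipcam");
                dir.mkdirs();
                File out = new File(dir, System.currentTimeMillis() + ".jpg");
                FileOutputStream fos = new FileOutputStream(out);
                frame.compress(Bitmap.CompressFormat.JPEG, 90, fos); // encode and write
                fos.flush();
                fos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }).start();
}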

Related

How to make a word cloud of an original image with a set number of words?

How can I make a word-cloud image out of an original image? (The original image and the desired output were shown as attached images.)
I am trying image-to-ASCII conversion using https://github.com/bachors/Android-Img2Ascii:
TextView textAsc=(TextView)findViewById(R.id.textAsc);
Bitmap image = BitmapFactory.decodeResource(getResources(), R.drawable.step0001);
// Bitmap image = BitmapFactory.decodeFile(filename);
new Img2Ascii()
.bitmap(image)
.quality(4) // 1 - 5
.color(true)
.convert(new Img2Ascii.Listener() {
@Override
public void onProgress(int percentage) {
textAsc.setText(String.valueOf(percentage) + " %");
}
@Override
public void onResponse(Spannable text) {
textAsc.setText(text);
}
});
Img2Ascii.java
package com.bachors.img2ascii;
import android.annotation.SuppressLint;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.os.AsyncTask;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.style.ForegroundColorSpan;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import static java.lang.Math.round;
/**
* Created by Bachors on 10/31/2017.
* https://github.com/bachors/Android-Img2Ascii
*/
public class Img2Ascii {
private String[] chars = {"@", "#", "+", "\\", ";", ":", ",", ".", "`", " "};
private Bitmap rgbImage;
private Boolean color = false;
private int quality = 3;
private int qualityColor = 6;
private Spannable response;
private Listener listener;
List<String> list = new ArrayList<String>();
public Img2Ascii(){
}
public Img2Ascii bitmap(Bitmap rgbImage){
this.rgbImage = rgbImage;
return this;
}
public Img2Ascii quality(int quality){
this.quality = quality;
return this;
}
public Img2Ascii color(Boolean color){
this.color = color;
return this;
}
public void convert(Listener listener) {
this.listener = listener;
new InstaApi().execute();
}
@SuppressLint("StaticFieldLeak")
private class InstaApi extends AsyncTask<String, Integer, Void> {
private InstaApi(){
}
@Override
protected void onPreExecute() {
super.onPreExecute();
list.add("Cool");
list.add("G");
list.add("S");
list.add("L");
}
@Override
protected Void doInBackground(String... arg0) {
if(color) {
quality = quality + qualityColor;
if (quality > 5 + qualityColor || quality < 1 + qualityColor)
quality = 3 + qualityColor;
}else{
if (quality > 5 || quality < 1)
quality = 3;
}
String tx;
SpannableStringBuilder span = new SpannableStringBuilder();
int width = rgbImage.getWidth();
int height = rgbImage.getHeight();
int i = 0;
for (int y = 0; y < height; y = y + quality) {
for (int x = 0; x < width; x = x + quality) {
int pixel = rgbImage.getPixel(x, y);
int red = Color.red(pixel);
int green = Color.green(pixel);
int blue = Color.blue(pixel);
if(color) {
Random randomizer = new Random();
String random =list.get(randomizer.nextInt(list.size()));
tx = random;
span.append(tx);
span.setSpan(new ForegroundColorSpan(Color.rgb(red, green, blue)), i, i + 1, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
}else {
int brightness = red + green + blue;
brightness = round(brightness / (765 / (chars.length - 1)));
tx = chars[brightness];
span.append(tx);
}
i++;
}
tx = "\n";
span.append(tx);
publishProgress(y, height);
i++;
if(isCancelled()) break;
}
response = span;
return null;
}
protected void onProgressUpdate(Integer... progress) {
int current = progress[0];
int total = progress[1];
int percentage = 100 * current / total;
listener.onProgress(percentage);
}
@Override
protected void onPostExecute(Void result) {
super.onPostExecute(result);
listener.onResponse(response);
}
}
public interface Listener {
void onProgress(int percentage);
void onResponse(Spannable response);
}
}

BebopVideoView to Mat

I have come up against a big problem.
I am trying to convert frames from BebopVideoView to a Mat.
(BebopVideoView is from the Parrot drone sample source code.)
I have been failing at this for several days.
Here is the code.
package com.hyeonjung.dronecontroll.view;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.parrot.arsdk.arcontroller.ARCONTROLLER_STREAM_CODEC_TYPE_ENUM;
import com.parrot.arsdk.arcontroller.ARControllerCodec;
import com.parrot.arsdk.arcontroller.ARFrame;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
public class BebopVideoView extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "BebopVideoView";
private static final String VIDEO_MIME_TYPE = "video/avc";
private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;
private MediaCodec mMediaCodec;
private Lock mReadyLock;
private boolean mIsCodecConfigured = false;
private ByteBuffer mSpsBuffer;
private ByteBuffer mPpsBuffer;
private ByteBuffer[] mBuffers;
private static final int VIDEO_WIDTH = 640;
private static final int VIDEO_HEIGHT = 368;
public byte[] a;
public Mat k;
public BebopVideoView(Context context) {
super(context);
customInit();
}
public BebopVideoView(Context context, AttributeSet attrs) {
super(context, attrs);
customInit();
}
public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
customInit();
}
private void customInit() {
mReadyLock = new ReentrantLock();
getHolder().addCallback(this);
}
public void displayFrame(ARFrame frame) {
mReadyLock.lock();
if ((mMediaCodec != null)) {
if (mIsCodecConfigured) {
// Here we have either a good PFrame, or an IFrame
int index = -1;
try {
index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
} catch (IllegalStateException e) {
Log.e(TAG, "Error while dequeue input buffer");
}
if (index >= 0) {
ByteBuffer b;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
b = mMediaCodec.getInputBuffer(index); // fill inputBuffer with valid data
}
else {
b = mBuffers[index]; // fill inputBuffer with valid data
b.clear();
}
if (b != null) {
b.put(frame.getByteData(), 0, frame.getDataSize()); //write to b.
getMat(frame);
saveMat(k);
}
try {
mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0); //end of stream
} catch (IllegalStateException e) {
Log.e(TAG, "Error while queue input buffer");
}
}
}
// Try to display previous frame
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outIndex;
try {
outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
while (outIndex >= 0) {
mMediaCodec.releaseOutputBuffer(outIndex, true);
outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
}
} catch (IllegalStateException e) {
Log.e(TAG, "Error while dequeue input buffer (outIndex)");
}
}
mReadyLock.unlock();
}
public void configureDecoder(ARControllerCodec codec) {
mReadyLock.lock();
if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
ARControllerCodec.H264 codecH264 = codec.getAsH264();
mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());
}
if ((mMediaCodec != null) && (mSpsBuffer != null)) {
configureMediaCodec();
}
mReadyLock.unlock();
}
private void configureMediaCodec() {
MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
format.setByteBuffer("csd-0", mSpsBuffer);
format.setByteBuffer("csd-1", mPpsBuffer);
mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
mMediaCodec.start();
if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
mBuffers = mMediaCodec.getInputBuffers();
}
mIsCodecConfigured = true;
}
private void initMediaCodec(String type) {
try {
mMediaCodec = MediaCodec.createDecoderByType(type);
} catch (IOException e) {
Log.e(TAG, "Exception", e);
}
if ((mMediaCodec != null) && (mSpsBuffer != null)) {
configureMediaCodec();
}
}
private void releaseMediaCodec() {
if (mMediaCodec != null) {
if (mIsCodecConfigured) {
mMediaCodec.stop();
mMediaCodec.release();
}
mIsCodecConfigured = false;
mMediaCodec = null;
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mReadyLock.lock();
initMediaCodec(VIDEO_MIME_TYPE);
mReadyLock.unlock();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mReadyLock.lock();
releaseMediaCodec();
mReadyLock.unlock();
}
public void getMat(ARFrame frame) {
k = new Mat();
k.get(150, 150, frame.getByteData());
k.put(150, 150, frame.getByteData());
//or
//byte[] a= new byte[b.remaining()];
//b.get(a);
//k.get(150, 150, a);
//k.put(150, 150, a);
}
public void saveMat (Mat mat) {
Mat mIntermediateMat = new Mat(150, 150, CvType.CV_8UC1);
Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_GRAY2BGR);
File path = new File(Environment.getExternalStorageDirectory() + "/data");
path.mkdirs();
File file = new File(path, "image.png");
String filename = file.toString();
Boolean bool = Imgcodecs.imwrite(filename, mIntermediateMat);
if (bool)
Log.i(TAG, "SUCCESS writing image to external storage");
else
Log.i(TAG, "Fail writing image to external storage");
}
}
I think I can get the image data from ByteBuffer b or frame.getByteData().
I inspected ByteBuffer b and frame.getByteData(); they contain byte values in the range -128 to 127.
I then checked the result of getMat and saveMat, and the result was NULL (Mat k).
What is wrong?
Please help me. T.T
If you use a TextureView you can simply grab a bitmap of it and convert it to a Mat. You need to use the TextureView's provided surface rather than the typical SurfaceView holder. This will require some additional refactoring of the mediaCodec lifecycle, but is a fairly trivial change.
public class BebopVideoView extends TextureView implements TextureView.SurfaceTextureListener {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
this.surface = new Surface(surface);
surfaceCreated = true;
}
...
}
And inside configureMediaCodec, use the class-level surface captured in onSurfaceTextureAvailable instead:
mediaCodec.configure(format, surface, null, 0);
With a couple of other minor tweaks you now have a lot more control over the view. You can do things like setTransform() and, more importantly in your case, getBitmap():
Mat mat = new Mat();
Utils.bitmapToMat(getBitmap(), mat);
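From there, saving the frame to storage mirrors the original saveMat. A sketch (assuming the OpenCV Android SDK is initialized and storage permission is granted; Imgcodecs.imwrite expects BGR channel order, while getBitmap() yields RGBA):
Mat mat = new Mat();
Utils.bitmapToMat(getBitmap(), mat);                // RGBA Mat from the TextureView
Mat bgr = new Mat();
Imgproc.cvtColor(mat, bgr, Imgproc.COLOR_RGBA2BGR); // convert for imwrite
File path = new File(Environment.getExternalStorageDirectory(), "data");
path.mkdirs();
Imgcodecs.imwrite(new File(path, "image.png").toString(), bgr);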

java.lang.IllegalStateException: Could not find a method?

I'm trying to screenshot the live stream of my IP cam. I am able to do it and save the image to my SD card; the problem is that I get an "Unfortunately, your app has stopped" crash after clicking the screenshot button. This is my code:
package com.example.isec.isec;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Random;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "MjpegView";
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
//private MjpegViewfunc clickFunc;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public void clickFunc(View view) {
if (view.getId() == R.id.button9) ;
// Toast.makeText(MjpegViewfunc.this, "Button Clicked", Toast.LENGTH_SHORT).show();
try {
//Bitmap bitmap = takeScreenShot();
MjpegView.this.thread.run();
// Bitmap bitmap = takeScreenShot(activity, ResourceID);
// saveBitmap(bitmap);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return;
}
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) {
mSurfaceHolder = surfaceHolder;
}
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) {
return new Rect(0, 0, dispWidth, dispHeight);
}
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized (mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
public Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width() + 2;
int bheight = b.height() + 2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left + 1, (bheight / 2) - ((p.ascent() + p.descent()) / 2) + 1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps;
while (mRun) {
if (surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(), bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if (showFps) {
p.setXfermode(mode);
if (ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom - ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right - ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
//return bm;
p.setXfermode(null);
frameCounter++;
if ((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter) + " fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
Random r = new Random();
int iterator = r.nextInt();
String mPath = Environment.getExternalStorageDirectory().toString() + "/screenshots/";
File imageFile = new File(mPath);
imageFile.mkdirs();
imageFile = new File(imageFile + "/" + iterator + "_screenshot.png");
try {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.PNG, 100, bos);
byte[] bitmapdata = bos.toByteArray();
FileOutputStream fos = new FileOutputStream(imageFile);
fos.write(bitmapdata);
fos.flush();
fos.close();
return;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return;
}
} catch (IOException e) {
e.getStackTrace();
Log.d(TAG, "catch IOException hit in run", e);
}
}
} finally {
if (c != null) {
mSurfaceHolder.unlockCanvasAndPost(c);
}
}
}
}
// return;
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if (mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while (retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {
e.getStackTrace();
Log.d(TAG, "catch IOException hit in stopPlayback", e);
}
}
}
public MjpegView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) {
thread.setSurfaceSize(w, h);
}
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
and my errors:
at android.view.View.performClick(View.java:4424)
at android.view.View$PerformClick.run(View.java:18383)
at android.os.Handler.handleCallback(Handler.java:733)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:137)
at android.app.ActivityThread.main(ActivityThread.java:4998)
at java.lang.reflect.Method.invokeNative(Native Method)
at java.lang.reflect.Method.invoke(Method.java:515)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:777)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:593)
at dalvik.system.NativeStart.main(Native Method)
09-09 11:18:47.227 10479-10900/com.example.isec.isec D/dalvikvm﹕ GC_FOR_ALLOC freed 4036K, 22% free 21589K/27336K, paused 79ms, total 82ms
09-09 11:18:47.237 10479-10900/com.example.isec.isec W/System.err﹕ java.io.FileNotFoundException: /storage/sdcard/screenshots/-1867569916_screenshot.png: open failed: EACCES (Permission denied)
09-09 11:18:47.237 10479-10900/com.example.isec.isec W/System.err﹕ at libcore.io.IoBridge.open(IoBridge.java:409)
09-09 11:18:47.237 10479-10900/com.example.isec.isec W/System.err﹕ at java.io.FileOutputStream.<init>(FileOutputStream.java:88)
09-09 11:18:47.237 10479-10900/com.example.isec.isec W/System.err﹕ at java.io.FileOutputStream.<init>(FileOutputStream.java:73)
09-09 11:18:47.247 10479-10900/com.example.isec.isec W/System.err﹕ at com.examp
You need to include the following line in your AndroidManifest.xml:
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
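Note that on Android 6.0 (API 23) and later the manifest entry alone is not enough: dangerous permissions such as WRITE_EXTERNAL_STORAGE must also be requested at runtime before writing. A sketch of that check (the request code 1 is arbitrary):
if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
        != PackageManager.PERMISSION_GRANTED) {
    // Prompt the user; the result arrives in onRequestPermissionsResult().
    ActivityCompat.requestPermissions(this,
            new String[]{ Manifest.permission.WRITE_EXTERNAL_STORAGE }, 1);
}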

RoboVM implementation of recording demo using AudioQueue results in "No @Marshaler found" error

I'm trying to implement iOS audio recording with RoboVM, following Apple's AudioQueue guide and their sample SpeakHere project, and am running into this error:
No @Marshaler found for parameter 1 of @Callback method <AQRecorder: void HandleInputBuffer(AQRecorder,org.robovm.apple.audiotoolbox.AudioQueue,org.robovm.apple.audiotoolbox.AudioQueueBuffer,org.robovm.apple.coreaudio.AudioTimeStamp,int,org.robovm.apple.coreaudio.AudioStreamPacketDescription)>
Any ideas? Here's the code I'm using:
Main.java:
import org.robovm.apple.coregraphics.CGRect;
import org.robovm.apple.foundation.NSAutoreleasePool;
import org.robovm.apple.uikit.UIApplication;
import org.robovm.apple.uikit.UIApplicationDelegateAdapter;
import org.robovm.apple.uikit.UIApplicationLaunchOptions;
import org.robovm.apple.uikit.UIButton;
import org.robovm.apple.uikit.UIButtonType;
import org.robovm.apple.uikit.UIColor;
import org.robovm.apple.uikit.UIControl;
import org.robovm.apple.uikit.UIControlState;
import org.robovm.apple.uikit.UIEvent;
import org.robovm.apple.uikit.UIScreen;
import org.robovm.apple.uikit.UIWindow;
public class IOSDemo extends UIApplicationDelegateAdapter {
private UIWindow window = null;
@Override
public boolean didFinishLaunching(UIApplication application,
UIApplicationLaunchOptions launchOptions) {
final AQRecorder aqRecorder = new AQRecorder();
final UIButton button = UIButton.create(UIButtonType.RoundedRect);
button.setFrame(new CGRect(115.0f, 121.0f, 91.0f, 37.0f));
button.setTitle("Start", UIControlState.Normal);
button.addOnTouchUpInsideListener(new UIControl.OnTouchUpInsideListener() {
@Override
public void onTouchUpInside(UIControl control, UIEvent event) {
if(button.getTitle(UIControlState.Normal) == "Stop"){
aqRecorder.stopRecord();
button.setTitle("Start", UIControlState.Normal);
}
else{
aqRecorder.startRecord();
button.setTitle("Stop", UIControlState.Normal);
}
}
});
window = new UIWindow(UIScreen.getMainScreen().getBounds());
window.setBackgroundColor(UIColor.lightGray());
window.addSubview(button);
window.makeKeyAndVisible();
try {
aqRecorder.setUpAudioFormat();
} catch (NoSuchMethodException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return true;
}
public static void main(String[] args) {
try (NSAutoreleasePool pool = new NSAutoreleasePool()) {
UIApplication.main(args, null, IOSDemo.class);
}
}
}
AQRecorder.java:
import org.robovm.apple.audiotoolbox.AudioFile;
import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.AudioStreamPacketDescription;
import org.robovm.apple.coreaudio.AudioTimeStamp;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;
public class AQRecorder {
AudioStreamBasicDescription mDataFormat; // 2
AudioQueue mQueue; // 3
//AudioQueueBufferRef mBuffers[kNumberBuffers]; // 4
AudioFile mAudioFile; // 5
int bufferByteSize; // 6
int mCurrentPacket; // 7
boolean mIsRunning; // 8
public void startRecord(){
mQueue.start(null);
}
public void stopRecord(){
mQueue.stop(true);
}
@Callback
static void HandleInputBuffer(
AQRecorder aqData,
AudioQueue inAQ,
AudioQueueBuffer inBuffer,
AudioTimeStamp inStartTime,
int inNumPackets,
AudioStreamPacketDescription inPacketDesc
) {
AQRecorder pAqData = aqData; // 1
if (inNumPackets == 0 && pAqData.mDataFormat.mBytesPerPacket() != 0)
inNumPackets = inBuffer.mAudioDataByteSize() / pAqData.mDataFormat.mBytesPerPacket();
if (!aqData.mIsRunning) // 5
return;
System.out.println(inBuffer.mAudioData());
}
void setUpAudioFormat() throws NoSuchMethodException{
mDataFormat = new AudioStreamBasicDescription(
16000, // mSampleRate
AudioFormat.LinearPCM, // mFormatID
(1 << 2), // mFormatFlags
512, // mBytesPerPacket
1, // mFramesPerPacket
512, // mBytesPerFrame
1, // mChannelsPerFrame
16, // mBitsPerChannel
0 // mReserved
);
AudioQueuePtr mQueuePtr = new AudioQueuePtr();
mQueuePtr.set(mQueue);
VoidPtr self = new VoidPtr();
@SuppressWarnings("rawtypes")
Class[] cArg = new Class[6];
cArg[0] = AQRecorder.class;
cArg[1] = AudioQueue.class;
cArg[2] = AudioQueueBuffer.class;
cArg[3] = AudioTimeStamp.class;
cArg[4] = int.class;
cArg[5] = AudioStreamPacketDescription.class;
FunctionPtr handleInputBuffer = new FunctionPtr((AQRecorder.class).getDeclaredMethod("HandleInputBuffer", cArg));
AudioQueue.newInput(mDataFormat, handleInputBuffer, self, null, "", 0, mQueuePtr);
}
};
With RoboVM 1.0.0-beta-3 I was finally able to get audio recording and playback working. I'm not sure why the recording audio queue takes up to 20 seconds to stop, but here is some sample code that works in the simulator and on my iPhone 4:
Main Class:
import java.util.Vector;
import org.robovm.apple.coregraphics.*;
import org.robovm.apple.foundation.*;
import org.robovm.apple.uikit.*;
public class TestAudioQueueCrash extends UIApplicationDelegateAdapter
{
private UIWindow window = null;
private int clickCount = 0;
@Override
public boolean didFinishLaunching(UIApplication application, UIApplicationLaunchOptions launchOptions)
{
final UIButton button = UIButton.create(UIButtonType.RoundedRect);
button.setFrame(new CGRect(15.0f, 121.0f, 291.0f, 37.0f));
button.setTitle("Click me!", UIControlState.Normal);
button.addOnTouchUpInsideListener(new UIControl.OnTouchUpInsideListener()
{
@Override
public void onTouchUpInside(UIControl control, UIEvent event)
{
if (clickCount == 0)
{
button.setTitle("Recording for 5 seconds... (SPEAK!)", UIControlState.Normal);
Runnable r = new Runnable()
{
public void run()
{
try
{
clickCount = 1;
AudioRecord record = new AudioRecord();
record.startRecording();
long when = System.currentTimeMillis() + 5000;
final Vector<byte[]> v = new Vector();
byte[] ba = new byte[3072];
while (System.currentTimeMillis() < when)
{
int n = 0;
while (n<3072)
{
int i = record.read(ba, n, 3072-n);
if (i==-1 || i == 0) break;
n += i;
}
if (n>0)
{
byte[] ba2 = new byte[n];
System.arraycopy(ba, 0, ba2, 0, n);
v.addElement(ba2);
}
}
System.out.println("DONE RECORDING");
record.release();
System.out.println("RECORDER STOPPED");
System.out.println("Playing back recorded audio...");
button.setTitle("Playing back recorded audio...", UIControlState.Normal);
AudioTrack at = new AudioTrack();
at.play();
while (v.size() > 0)
{
ba = v.remove(0);
at.write(ba, 0, ba.length);
Thread.yield();
}
at.stop();
button.setTitle("DONE", UIControlState.Normal);
System.out.println("FINISHED PIPING AUDIO");
}
catch (Exception x)
{
x.printStackTrace();
button.setTitle("ERROR: " + x.getMessage(), UIControlState.Normal);
}
clickCount = 0;
}
};
new Thread(r).start();
}
}
});
window = new UIWindow(UIScreen.getMainScreen().getBounds());
window.setBackgroundColor(UIColor.lightGray());
window.addSubview(button);
window.makeKeyAndVisible();
return true;
}
public static void main(String[] args)
{
try (NSAutoreleasePool pool = new NSAutoreleasePool())
{
UIApplication.main(args, null, TestAudioQueueCrash.class);
}
}
}
AQRecorderState:
/*<imports>*/
import java.util.Hashtable;
import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
/*</imports>*/
/*<javadoc>*/
/*</javadoc>*/
/*<annotations>*//*</annotations>*/
/*<visibility>*/public/*</visibility>*/ class /*<name>*/AQRecorderState/*</name>*/
extends /*<extends>*/Struct<AQRecorderState>/*</extends>*/
/*<implements>*//*</implements>*/ {
protected static Hashtable<Integer, AudioRecord> mAudioRecords = new Hashtable<>();
protected static int mLastID = 0;
/*<ptr>*/public static class AQRecorderStatePtr extends Ptr<AQRecorderState, AQRecorderStatePtr> {}/*</ptr>*/
/*<bind>*/
/*</bind>*/
/*<constants>*//*</constants>*/
/*<constructors>*/
public AQRecorderState() {}
public AQRecorderState(AudioRecord ar)
{
this.mID(++mLastID);
mAudioRecords.put(mID(), ar);
}
/*</constructors>*/
/*<properties>*//*</properties>*/
/*<members>*/
@StructMember(0) public native int mID();
@StructMember(0) public native AQRecorderState mID(int mID);
/*</members>*/
/*<methods>*//*</methods>*/
public AudioRecord getRecord()
{
return mAudioRecords.get(mID());
}
public static void drop(int mStateID)
{
mAudioRecords.remove(mStateID);
}
}
AudioRecord:
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.lang.reflect.Method;
import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer.AudioQueueBufferPtr;
import org.robovm.apple.audiotoolbox.AudioQueueError;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.AudioStreamPacketDescription.AudioStreamPacketDescriptionPtr;
import org.robovm.apple.coreaudio.AudioTimeStamp.AudioTimeStampPtr;
import org.robovm.apple.coreaudio.CoreAudio;
import org.robovm.apple.corefoundation.CFRunLoopMode;
import org.robovm.rt.VM;
import org.robovm.rt.bro.Bro;
import org.robovm.rt.bro.Struct;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.annotation.Library;
import org.robovm.rt.bro.annotation.Pointer;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;
/*<annotations>*/@Library("AudioToolbox")/*</annotations>*/
public class AudioRecord
{
protected double mSampleRate;
protected AudioFormat mFormatID;
protected int mFormatFlags;
protected int mBytesPerPacket;
protected int mFramesPerPacket;
protected int mBytesPerFrame;
protected int mChannelsPerFrame;
protected int mBitsPerChannel;
protected AudioQueue mQueue = null;
private int kNumberBuffers = 3;
private PipedInputStream mPIS;
private PipedOutputStream mPOS;
private int mStateID = -1;
private boolean mRunning = false;
public AudioRecord() throws IOException
{
mSampleRate = 44100;
mFormatID = AudioFormat.LinearPCM;
mFormatFlags = CoreAudio.AudioFormatFlagIsPacked | CoreAudio.AudioFormatFlagIsSignedInteger;
mBytesPerPacket = 2;
mFramesPerPacket = 1;
mBytesPerFrame = 2;
mChannelsPerFrame = 1;
mBitsPerChannel = 16;
mPOS = new PipedOutputStream();
mPIS = new PipedInputStream(mPOS);
}
public static int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat)
{
// TODO Auto-generated method stub
return 0;
}
public int deriveBufferSize(AudioQueue audioQueue, AudioStreamBasicDescription ASBDescription, double seconds)
{
int maxBufferSize = 0x50000;
int maxPacketSize = ASBDescription.getMBytesPerPacket();
System.out.println(3);
double numBytesForTime = ASBDescription.getMSampleRate() * maxPacketSize * seconds;
return (int)(numBytesForTime < maxBufferSize ? numBytesForTime : maxBufferSize);
}
public void release()
{
System.out.println("RECORD QUEUE STOPPING...");
mRunning = false;
mQueue.stop(true);
// mQueue.dispose(true);
System.out.println("RECORD QUEUE STOPPED");
try
{
mPOS.close();
mPIS.close();
AQRecorderState.drop(mStateID);
}
catch (Exception x) { x.printStackTrace(); }
}
public int read(byte[] abData, int i, int length) throws IOException
{
return mPIS.read(abData, i, length);
}
/*<bind>*/static { Bro.bind(AudioRecord.class); }/*</bind>*/
/*<constants>*//*</constants>*/
/*<constructors>*//*</constructors>*/
/*<properties>*//*</properties>*/
/*<members>*//*</members>*/
@Callback
public static void callbackMethod(
@Pointer long refcon,
AudioQueue inAQ,
AudioQueueBuffer inBuffer,
AudioTimeStampPtr inStartTime,
int inNumPackets,
AudioStreamPacketDescriptionPtr inPacketDesc
)
{
try
{
System.out.println("a");
AQRecorderState.AQRecorderStatePtr ptr = new AQRecorderState.AQRecorderStatePtr();
ptr.set(refcon);
System.out.println("b");
AQRecorderState aqrs = ptr.get();
System.out.println("c");
byte[] ba = VM.newByteArray(inBuffer.getMAudioData().getHandle(), inBuffer.getMAudioDataByteSize());
System.out.println("d");
aqrs.getRecord().receive(ba);
System.out.println("e");
}
catch (Exception x) { x.printStackTrace(); }
inAQ.enqueueBuffer(inBuffer, 0, null);
System.out.println("f");
}
private void receive(byte[] ba)
{
if (mRunning) try { mPOS.write(ba); } catch (Exception x) { x.printStackTrace(); }
}
public void startRecording() throws Exception
{
AudioStreamBasicDescription asbd = new AudioStreamBasicDescription(mSampleRate, mFormatID, mFormatFlags, mBytesPerPacket, mFramesPerPacket, mBytesPerFrame, mChannelsPerFrame, mBitsPerChannel, 0);
AudioQueuePtr mQueuePtr = new AudioQueuePtr();
AudioQueueBufferPtr mBuffers = Struct.allocate(AudioQueueBufferPtr.class, kNumberBuffers);
System.out.println(11);
AQRecorderState aqData = new AQRecorderState(this);
mStateID = aqData.mID();
System.out.println(12);
Method callbackMethod = null;
Method[] methods = this.getClass().getMethods();
int i = methods.length;
while (i-->0) if (methods[i].getName().equals("callbackMethod"))
{
callbackMethod = methods[i];
break;
}
FunctionPtr fp = new FunctionPtr(callbackMethod );
System.out.println(13);
VoidPtr vp = aqData.as(VoidPtr.class);
System.out.println(14);
AudioQueueError aqe = AudioQueue.newInput(asbd, fp, vp, null, null, 0, mQueuePtr);
System.out.println(CFRunLoopMode.Common.value());
System.out.println(aqe.name());
mQueue = mQueuePtr.get();
System.out.println(2);
int bufferByteSize = deriveBufferSize(mQueue, asbd, 0.5);
System.out.println("BUFFER SIZE: "+bufferByteSize);
AudioQueueBufferPtr[] buffers = mBuffers.toArray(kNumberBuffers);
for (i = 0; i < kNumberBuffers; ++i)
{
mQueue.allocateBuffer(bufferByteSize, buffers[i]);
mQueue.enqueueBuffer(buffers[i].get(), 0, null);
}
mRunning = true;
mQueue.start(null);
}
}
AQPlayerState:
/*<imports>*/
import java.util.Hashtable;
import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
/*</imports>*/
/*<javadoc>*/
/*</javadoc>*/
/*<annotations>*//*</annotations>*/
/*<visibility>*/public/*</visibility>*/ class /*<name>*/AQPlayerState/*</name>*/
extends /*<extends>*/Struct<AQPlayerState>/*</extends>*/
/*<implements>*//*</implements>*/ {
protected static Hashtable<Integer, AudioTrack> mAudioTracks = new Hashtable<>();
protected static int mLastID = 0;
/*<ptr>*/public static class AQPlayerStatePtr extends Ptr<AQPlayerState, AQPlayerStatePtr> {}/*</ptr>*/
/*<bind>*/
/*</bind>*/
/*<constants>*//*</constants>*/
/*<constructors>*/
public AQPlayerState() {}
public AQPlayerState(AudioTrack ar)
{
this.mID(++mLastID);
this.mID2(mLastID);
mAudioTracks.put(mID(), ar);
}
/*</constructors>*/
/*<properties>*//*</properties>*/
/*<members>*/
@StructMember(0) public native int mID();
@StructMember(0) public native AQPlayerState mID(int mID);
@StructMember(1) public native int mID2();
@StructMember(1) public native AQPlayerState mID2(int mID2);
/*</members>*/
/*<methods>*//*</methods>*/
public AudioTrack getTrack()
{
return mAudioTracks.get(mID());
}
public static void drop(int mStateID)
{
mAudioTracks.remove(mStateID);
}
}
AudioTrack:
import java.lang.reflect.Method;
import java.util.Vector;
import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer.AudioQueueBufferPtr;
import org.robovm.apple.audiotoolbox.AudioQueueError;
import org.robovm.apple.audiotoolbox.AudioQueueParam;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.CoreAudio;
import org.robovm.rt.bro.Bro;
import org.robovm.rt.bro.Struct;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.annotation.Pointer;
import org.robovm.rt.bro.ptr.BytePtr;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;
public class AudioTrack {
public static final int MODE_STREAM = -1;
private int kNumberBuffers = 3;
private Vector<byte[]> mData = new Vector<>();
private int mStateID = -1;
private boolean mRunning = false;
protected double mSampleRate;
protected AudioFormat mFormatID;
protected int mFormatFlags;
protected int mBytesPerPacket;
protected int mFramesPerPacket;
protected int mBytesPerFrame;
protected int mChannelsPerFrame;
protected int mBitsPerChannel;
protected AudioQueue mQueue = null;
public AudioTrack()
{
mSampleRate = 44100;
mFormatID = AudioFormat.LinearPCM;
mFormatFlags = CoreAudio.AudioFormatFlagIsPacked | CoreAudio.AudioFormatFlagIsSignedInteger;
mBytesPerPacket = 2;
mFramesPerPacket = 1;
mBytesPerFrame = 2;
mChannelsPerFrame = 1;
mBitsPerChannel = 16;
}
public static int getMinBufferSize(int sampleRate, int channelConfigurationMono, int encodingPcm16bit)
{
// TODO Auto-generated method stub
return 0;
}
public int deriveBufferSize(AudioStreamBasicDescription ASBDescription, int maxPacketSize, double seconds)
{
int maxBufferSize = 0x50000;
int minBufferSize = 0x4000;
double numPacketsForTime = ASBDescription.getMSampleRate() / ASBDescription.getMFramesPerPacket() * seconds;
int outBufferSize = (int)(numPacketsForTime * maxPacketSize);
if (outBufferSize > maxBufferSize) return maxBufferSize;
if (outBufferSize < minBufferSize) return minBufferSize;
return outBufferSize;
}
/*<bind>*/static { Bro.bind(AudioTrack.class); }/*</bind>*/
/*<constants>*//*</constants>*/
/*<constructors>*//*</constructors>*/
/*<properties>*//*</properties>*/
/*<members>*//*</members>*/
@Callback
public static void callbackMethod(
@Pointer long refcon,
AudioQueue inAQ,
AudioQueueBuffer inBuffer
)
{
System.out.println("In Callback");
AQPlayerState.AQPlayerStatePtr ptr = new AQPlayerState.AQPlayerStatePtr();
ptr.set(refcon);
AQPlayerState aqps = ptr.get();
AudioTrack me = aqps.getTrack();
me.nextChunk(inAQ, inBuffer);
}
private void nextChunk(AudioQueue inAQ, AudioQueueBuffer inBuffer)
{
byte[] ba = null;
long when = System.currentTimeMillis() + 30000;
while (mRunning && System.currentTimeMillis() < when)
{
if (mData.size() > 0)
{
ba = mData.remove(0);
break;
}
try { Thread.yield(); } catch (Exception x) { x.printStackTrace(); }
}
if (ba == null) ba = new byte[0];
System.out.println("PLAYING BYTES: "+ba.length);
if (ba.length>0)
{
VoidPtr vp = inBuffer.getMAudioData();
BytePtr bp = vp.as(BytePtr.class); //Struct.allocate(BytePtr.class, ba.length);
bp.set(ba);
// inBuffer.setMAudioData(vp);
inBuffer.setMAudioDataByteSize(ba.length);
}
mQueue.enqueueBuffer(inBuffer, 0, null);
}
public void play()
{
final AudioTrack me = this;
Runnable r = new Runnable()
{
public void run()
{
AudioStreamBasicDescription asbd = new AudioStreamBasicDescription(mSampleRate, mFormatID, mFormatFlags, mBytesPerPacket, mFramesPerPacket, mBytesPerFrame, mChannelsPerFrame, mBitsPerChannel, 0);
AudioQueuePtr mQueuePtr = new AudioQueuePtr();
Method callbackMethod = null;
Method[] methods = me.getClass().getMethods();
int i = methods.length;
while (i-->0) if (methods[i].getName().equals("callbackMethod"))
{
callbackMethod = methods[i];
break;
}
FunctionPtr fp = new FunctionPtr(callbackMethod );
AQPlayerState aqData = new AQPlayerState(me);
mStateID = aqData.mID();
VoidPtr vp = aqData.as(VoidPtr.class);
// AudioQueueError aqe = AudioQueue.newOutput(asbd, fp, vp, CFRunLoop.getCurrent(), new CFString(CFRunLoopMode.Common.value()), 0, mQueuePtr);
AudioQueueError aqe = AudioQueue.newOutput(asbd, fp, vp, null, null, 0, mQueuePtr);
System.out.println(aqe.name());
mQueue = mQueuePtr.get();
int bufferByteSize = deriveBufferSize(asbd, 2, 0.5);
System.out.println("BUFFER SIZE: "+bufferByteSize);
System.out.println("Volume PARAM:"+(int)AudioQueueParam.Volume.value());
mQueue.setParameter((int)AudioQueueParam.Volume.value(), 1.0f);
mRunning = true;
AudioQueueBufferPtr mBuffers = Struct.allocate(AudioQueueBufferPtr.class, kNumberBuffers);
AudioQueueBufferPtr[] buffers = mBuffers.toArray(kNumberBuffers);
for (i = 0; i < kNumberBuffers; ++i)
{
mQueue.allocateBuffer(bufferByteSize, buffers[i]);
nextChunk(mQueue, buffers[i].get());
}
System.out.println("STARTING QUEUE");
mQueue.start(null);
System.out.println("QUEUE STARTED");
/*
System.out.println("RUNNING LOOP");
do
{
System.out.print(".");
CFRunLoop.runInMode(CFRunLoopMode.Default, 0.25, false);
System.out.print("#");
}
while (mRunning);
System.out.println("!!!");
CFRunLoop.runInMode(CFRunLoopMode.Default, 1, false);
System.out.println("DONE RUNNING LOOP");
mQueue.stop(true);
AQPlayerState.drop(mStateID);
System.out.println("QUEUE STOPPED");
*/
}
};
new Thread(r).start();
}
public void write(byte[] ba, int i, int length)
{
while (mData.size() > 10) Thread.yield();
System.out.println("SOUND IN: "+length+" bytes");
mData.addElement(ba);
}
public void stop()
{
System.out.println("STOPPING AUDIO PLAYER");
mRunning = false;
mQueue.stop(true);
AQPlayerState.drop(mStateID);
}
public void release()
{
// TODO Auto-generated method stub
}
}

After running an Eclipse SWT application it shows many exceptions

My SWT application compiles without errors, but when I run it, it shows the output for a few seconds and then the Eclipse instance becomes unresponsive.
Please help me to avoid these exceptions.
I have tried increasing the heap size.
Here is my code. I think my logic also has a problem, but I don't know how to correct it; I just followed a book. It is a clock program, and I wrote it so that each movement of the second hand creates a new thread. I want to make it a single thread.
It throws "unable to create new native thread".
Activator.java
package com.packtpub.e4.clock.ui;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Tray;
import org.eclipse.swt.widgets.TrayItem;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
/**
* The activator class controls the plug-in life cycle
*/
public class Activator extends AbstractUIPlugin {
// The plug-in ID
public static final String PLUGIN_ID = "com.packtpub.e4.clock.ui"; //$NON-NLS-1$
// The shared instance
private static Activator plugin;
private TrayItem trayItem;
private Image image;
private Shell shell;
/**
* The constructor
*/
public Activator() {
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
*/
public void start(BundleContext context) throws Exception {
super.start(context);
plugin = this;
final Display display = Display.getDefault();
display.asyncExec(new Runnable() {
public void run() {
image = new Image(display, Activator.class.getResourceAsStream("/icons/sample.gif"));
Tray tray = display.getSystemTray();
if (tray != null && image != null) {
trayItem = new TrayItem(tray, SWT.NONE);
trayItem.setToolTipText("Hello World");
trayItem.setVisible(true);
trayItem.setText("Hello World");
trayItem.setImage(new Image(trayItem.getDisplay(),
Activator.class.getResourceAsStream("/icons/sample.gif")));
}
trayItem.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent e) {
if (shell == null) {
shell = new Shell(trayItem.getDisplay());
shell.setLayout(new FillLayout());
new ClockWidget(shell, SWT.NONE, new RGB(255, 0, 255));
shell.pack();
}
shell.open();
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {
// TODO Auto-generated method stub
}
});
}
});
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
*/
public void stop(BundleContext context) throws Exception {
if (trayItem != null && !trayItem.isDisposed()) {
Display.getDefault().asyncExec(new Runnable() {
public void run() {
if (trayItem != null && !trayItem.isDisposed())
trayItem.dispose();
}
});
}
if (image != null && !image.isDisposed()) {
Display.getDefault().asyncExec(new Runnable() {
public void run() {
if (image != null && !image.isDisposed())
image.dispose();
}
});
}
if (shell != null && !shell.isDisposed()) {
Display.getDefault().asyncExec(new Runnable() {
public void run() {
if (shell != null && !shell.isDisposed())
shell.dispose();
}
});
}
}
/**
* Returns the shared instance
*
* @return the shared instance
*/
public static Activator getDefault() {
return plugin;
}
/**
* Returns an image descriptor for the image file at the given
* plug-in relative path
*
* @param path the path
* @return the image descriptor
*/
public static ImageDescriptor getImageDescriptor(String path) {
return imageDescriptorFromPlugin(PLUGIN_ID, path);
}
}
ClockWidget.java
package com.packtpub.e4.clock.ui;
import java.util.Date;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
public class ClockWidget extends Canvas
{
private final Color color;
private int offset;
public void setOffset(int offset)
{
this.offset = offset;
}
public Color getColor()
{
return color;
}
public int getOffset()
{
return offset;
}
public ClockWidget(Composite parent, int style,RGB rgb)
{
super(parent, style);
this.color = new Color(parent.getDisplay(),rgb);
addDisposeListener(new DisposeListener()
{
public void widgetDisposed(DisposeEvent e)
{
if(color != null && !color.isDisposed())
color.dispose();
}
});
addPaintListener(new PaintListener()
{
public void paintControl(PaintEvent e)
{
ClockWidget.this.paintControl(e);
}
});
}
public void paintControl(PaintEvent e)
{
@SuppressWarnings("deprecation")
int seconds = new Date().getSeconds();
int arc = (15-seconds) * 6 % 360;
e.gc.setBackground(color);
e.gc.fillArc(e.x,e.y,e.width-1,e.height-1,arc-1,2);
e.gc.drawArc(e.x,e.y,e.width-1,e.height-1,0,360);
e.gc.setBackground(e.display.getSystemColor(SWT.COLOR_BLACK));
@SuppressWarnings("deprecation")
int hours = new Date().getHours() + offset;
arc = (3 - hours) * 30 % 360;
e.gc.fillArc(e.x, e.y, e.width-1, e.height-1, arc - 5, 10);
new Thread("TickTock")
{
public void run()
{
while (!ClockWidget.this.isDisposed())
{
ClockWidget.this.getDisplay().asyncExec(
new Runnable()
{
public void run()
{
if (!ClockWidget.this.isDisposed())
ClockWidget.this.redraw();
}
});
try
{
Thread.sleep(99999);
}
catch (InterruptedException e)
{
System.out.println("#clock"+e.toString());
return;
}
}
}
}.start();
}
public Point computeSize(int w,int h,boolean changed)
{
int size;
if(w == SWT.DEFAULT)
{
size = h;
}
else if (h == SWT.DEFAULT)
{
size = w;
}
else
{
size = Math.min(w,h);
}
if(size == SWT.DEFAULT)
size = 50;
return new Point(size,size);
}
}
SampleView.java
package com.packtpub.e4.clock.ui.views;
import java.util.TimeZone;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.layout.RowLayout;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.part.*;
import org.eclipse.swt.SWT;
import com.packtpub.e4.clock.ui.ClockWidget;
public class SampleView extends ViewPart {
private Combo timezones;
public void createPartControl(Composite parent) {
try{
RowLayout layout = new RowLayout(SWT.HORIZONTAL);
parent.setLayout(layout);
Object[] oo=parent.getDisplay().getDeviceData().objects;
int c = 0;
for (int j = 0; j < oo.length; j++)
if (oo[j] instanceof Color)
c++;
System.err.println("There are " + c + " Color instances");
final ClockWidget clock1 =new ClockWidget(parent, SWT.NONE, new RGB(255,0,0));
//final ClockWidget clock2 =new ClockWidget(parent, SWT.NONE, new RGB(0,255,0));
//final ClockWidget clock3 =new ClockWidget(parent, SWT.NONE, new RGB(0,0,255));
//clock1.setLayoutData(new RowData(20,20));
//clock3.setLayoutData(new RowData(100,100));
String[] ids = TimeZone.getAvailableIDs();
timezones = new Combo(parent, SWT.SIMPLE);
timezones.setVisibleItemCount(5);
for (int i = 0; i < ids.length; i++) {
timezones.add(ids[i]);
timezones.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent e) {
String z = timezones.getText();
TimeZone tz = z == null ? null : TimeZone.getTimeZone(z);
TimeZone dt = TimeZone.getDefault();
int offset = tz == null ? 0 : (
tz.getOffset(System.currentTimeMillis()) -
dt.getOffset(System.currentTimeMillis())) / 3600000;
clock1.setOffset(offset);
clock1.redraw();
}
public void widgetDefaultSelected(SelectionEvent e) {
clock1.setOffset(0);
clock1.redraw();
}
});
}
}catch(Exception e){
System.out.println("# SampleView.java"+e.toString());
}
}
public void setFocus() {
timezones.setFocus();
}
}
I got the answer: the thread was being created inside ClockWidget's paintControl method, which runs on every redraw, so a new "TickTock" thread was spawned on every repaint until the JVM could no longer create native threads. Cut the thread creation out of paintControl and paste it into the ClockWidget constructor so it is created only once.
Then the code works properly:
package com.packtpub.e4.clock.ui;
import java.util.Date;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
public class ClockWidget extends Canvas
{
private final Color color;
private int offset;
public void setOffset(int offset)
{
this.offset = offset;
}
public Color getColor()
{
return color;
}
public int getOffset()
{
return offset;
}
public ClockWidget(Composite parent, int style,RGB rgb)
{
super(parent, style);
this.color = new Color(parent.getDisplay(),rgb);
addDisposeListener(new DisposeListener()
{
public void widgetDisposed(DisposeEvent e)
{
if(color != null && !color.isDisposed())
color.dispose();
}
});
new Thread("TickTock")
{
public void run()
{
while (!ClockWidget.this.isDisposed())
{
ClockWidget.this.getDisplay().asyncExec(
new Runnable()
{
public void run()
{
if (!ClockWidget.this.isDisposed())
ClockWidget.this.redraw();
}
});
try
{
Thread.sleep(1000);
}
catch (InterruptedException e)
{
return;
}
}
}
}.start();
addPaintListener(new PaintListener()
{
public void paintControl(PaintEvent e)
{
ClockWidget.this.paintControl(e);
}
});
}
public void paintControl(PaintEvent e)
{
@SuppressWarnings("deprecation")
int seconds = new Date().getSeconds();
int arc = (15-seconds) * 6 % 360;
e.gc.setBackground(color);
e.gc.fillArc(e.x,e.y,e.width-1,e.height-1,arc-1,2);
e.gc.drawArc(e.x,e.y,e.width-1,e.height-1,0,360);
e.gc.setBackground(e.display.getSystemColor(SWT.COLOR_BLACK));
@SuppressWarnings("deprecation")
int hours = new Date().getHours() + offset;
arc = (3 - hours) * 30 % 360;
e.gc.fillArc(e.x, e.y, e.width-1, e.height-1, arc - 5, 10);
}
public Point computeSize(int w,int h,boolean changed)
{
int size;
if(w == SWT.DEFAULT)
{
size = h;
}
else if (h == SWT.DEFAULT)
{
size = w;
}
else
{
size = Math.min(w,h);
}
if(size == SWT.DEFAULT)
size = 50;
return new Point(size,size);
}
}
