I'm trying to switch the camera with a button, and I'm successful in that, but the problem is that the preview of the rear camera is in inverse portrait. I've tried setDisplayOrientation, but no changes... Maybe I've put it on the wrong line. This is the code; with the button I call the startCamera method:
public class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final String TAG = "MainRenderer";
public static final int DEVICE_ORIENTATION_PORTRAIT = 0;
public static final int DEVICE_ORIENTATION_INVERSE_PORTRAIT = 1;
public static final int DEVICE_ORIENTATION_LANDSCAPE = 2;
public static final int DEVICE_ORIENTATION_INVERSE_LANDSCAPE = 3;
private Camera.CameraInfo cameraInfo;
public volatile int deviceOrientation = DEVICE_ORIENTATION_PORTRAIT;
private FSDK.HTracker tracker;
private int[] textures;
private Camera camera;
private SurfaceTexture surfaceTexture;
private boolean updateSurfaceTexture = false;
private FSDK.FSDK_Features[] trackingFeatures;
private MR.MaskFeatures maskCoords;
private int[] isMaskTexture1Created = new int[]{0};
private int[] isMaskTexture2Created = new int[]{0};
private int width;
private int height;
private ByteBuffer pixelBuffer;
private FSDK.HImage cameraImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE cameraImageMode = new FSDK.FSDK_IMAGEMODE();
private FSDK.HImage snapshotImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE snapshotImageMode = new FSDK.FSDK_IMAGEMODE();
private MainView mainView;
private MainActivity mainActivity;
private volatile boolean isResizeCalled = false;
private volatile boolean isResized = false;
public long IDs[] = new long[MR.MAX_FACES];
public long face_count[] = new long[1];
private long frameCount = 0;
private long startTime = 0;
private AtomicBoolean isTakingSnapshot = new AtomicBoolean(false);
public static final int[][] MASKS = new int[][]{
{R.raw.lips_pink, R.drawable.lips_pink, R.drawable.lips_pink_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_purple, R.drawable.lips_purple, R.drawable.lips_purple_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_red, R.drawable.lips_red, R.drawable.lips_red_normal, MR.SHIFT_TYPE_NO},
};
private int mask = 0;
private int maskLoaded = 0;
private volatile boolean isMaskChanged = false;
private boolean inPreview = false;
public void changeMask(int i) {
mask += i;
isMaskChanged = true;
}
public MainRenderer(MainView view) {
tracker = Application.tracker;
mainView = view;
mainActivity = (MainActivity) mainView.getContext();
trackingFeatures = new FSDK.FSDK_Features[MR.MAX_FACES];
for (int i = 0; i < MR.MAX_FACES; ++i) {
trackingFeatures[i] = new FSDK.FSDK_Features();
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j] = new FSDK.TPoint();
}
}
maskCoords = new MR.MaskFeatures();
}
public void close() {
updateSurfaceTexture = false;
surfaceTexture.release();
camera.stopPreview();
camera.release();
camera = null;
deleteTex();
}
public void startCamera() {
if (inPreview) {
camera.stopPreview();
inPreview = false;
}
//NB: if you don't release the current camera before switching, your app will crash
camera.release();
//swap the id of the camera to be used
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_FRONT;
} else {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_BACK;
}
camera = Camera.open(cameraInfo.facing);
//Code snippet for this method from somewhere on Android Developers, I forget where
//setCameraDisplayOrientation(mainActivity, cameraInfo.facing, camera);
try {
//this step is critical, or the preview on the new camera will not know where to render to
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
camera.startPreview();
inPreview = true;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceCreated");
isResizeCalled = false;
isResized = false;
initTex();
loadMask(mask);
surfaceTexture = new SurfaceTexture(textures[0]);
surfaceTexture.setOnFrameAvailableListener(this);
// Find the ID of the camera
int cameraId = 0;
boolean frontCameraFound = false;
cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
}
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
frontCameraFound = true;
}
}
if (frontCameraFound) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
GLES11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); //background color
}
private byte[] readBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
int bufferSize = 16384;
byte[] buffer = new byte[bufferSize];
int len;
while ((len = inputStream.read(buffer)) != -1) {
byteBuffer.write(buffer, 0, len);
}
return byteBuffer.toByteArray();
}
// must be called from the thread with OpenGL context!
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public void loadMask(int maskNumber) {
GLES11.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
Log.d(TAG, "Loading mask...");
int[] mask = MASKS[maskNumber];
if (isMaskTexture1Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 1);
}
if (isMaskTexture2Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 2);
}
isMaskTexture1Created[0] = 0;
isMaskTexture2Created[0] = 0;
InputStream stream = mainView.getResources().openRawResource(mask[0]);
int res = MR.LoadMaskCoordsFromStream(stream, maskCoords);
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (res != FSDK.FSDKE_OK) {
Log.e(TAG, "Error loading mask coords from stream: " + res);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
return;
}
BitmapFactory.Options bitmapDecodingOptions = new BitmapFactory.Options();
bitmapDecodingOptions.inScaled = false; // to load original image without scaling
FSDK.HImage img1 = new FSDK.HImage();
if (mask[1] == -1) { // if no image
FSDK.CreateEmptyImage(img1);
} else {
stream = mainView.getResources().openRawResource(mask[1]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img1, data, data.length);
Log.d(TAG, "Load mask image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img1, w);
FSDK.GetImageHeight(img1, h);
Log.d(TAG, "Mask image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask image, using empty image");
FSDK.CreateEmptyImage(img1);
}
}
FSDK.HImage img2 = new FSDK.HImage();
if (mask[2] == -1) { // if no normal image
FSDK.CreateEmptyImage(img2);
} else {
stream = mainView.getResources().openRawResource(mask[2]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img2, data, data.length);
Log.d(TAG, "Load mask normal image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img2, w);
FSDK.GetImageHeight(img2, h);
Log.d(TAG, "Mask normal image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask normal image, using empty image");
FSDK.CreateEmptyImage(img2);
}
}
res = MR.LoadMask(img1, img2, textures[1], textures[2], isMaskTexture1Created, isMaskTexture2Created);
FSDK.FreeImage(img1);
FSDK.FreeImage(img2);
Log.d(TAG, "Mask loaded with result " + res + " texture1Created:" + isMaskTexture1Created[0] + " texture2Created:" + isMaskTexture2Created[0]);
Log.d(TAG, "Mask textures: " + textures[1] + " " + textures[2]);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
}
public void onDrawFrame(GL10 unused) { //call opengl functions only inside these functions!
GLES11.glClear(GLES11.GL_COLOR_BUFFER_BIT);
if (!isResized) {
return;
}
synchronized (this) {
if (updateSurfaceTexture) {
surfaceTexture.updateTexImage();
updateSurfaceTexture = false;
}
}
if (isMaskChanged) {
maskLoaded = mask;
loadMask(mask);
isMaskChanged = false;
}
int rotation = 1;
// First, drawing without mask to get image buffer
int res = MR.DrawGLScene(textures[0], 0, trackingFeatures, rotation, MR.SHIFT_TYPE_NO, textures[1], textures[2], maskCoords, 0, 0, width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the first MR.DrawGLScene call: " + res);
}
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
face_count[0] = 0;
processCameraImage();
// Second, drawing with mask atop of image
res = MR.DrawGLScene(textures[0], (int) face_count[0], trackingFeatures, rotation, MASKS[maskLoaded][3], textures[1], textures[2], maskCoords, isMaskTexture1Created[0], isMaskTexture2Created[0], width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the second MR.DrawGLScene call: " + res);
}
// Save snapshot if needed
if (isTakingSnapshot.compareAndSet(true, false)) {
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
snapshotImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
res = FSDK.LoadImageFromBuffer(snapshotImage, pixelBuffer.array(), width, height, width * 4, snapshotImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading snapshot image to FaceSDK: " + res);
} else {
FSDK.MirrorImage(snapshotImage, false);
String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath();
final String filename = galleryPath + "/MirrorRealityDemo" + System.currentTimeMillis() + ".png";
res = FSDK.SaveImageToFile(snapshotImage, filename);
Log.d(TAG, "saving snapshot to " + filename);
FSDK.FreeImage(snapshotImage);
if (FSDK.FSDKE_OK == res) {
mainActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(filename);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
mainActivity.sendBroadcast(mediaScanIntent);
Toast.makeText(mainActivity, "Saved successfully", Toast.LENGTH_SHORT).show();
}
});
}
}
}
// Show fps
++frameCount;
long timeCurrent = System.currentTimeMillis();
if (startTime == 0) startTime = timeCurrent;
long diff = timeCurrent - startTime;
if (diff >= 3000) {
final float fps = frameCount / (diff / 1000.0f);
frameCount = 0;
startTime = 0;
final TextView fpsTextView = mainActivity.fpsTextView();
mainActivity.fpsTextView().post(new Runnable() {
@Override
public void run() {
if (!mainActivity.isFinishing()) {
fpsTextView.setText(fps + " FPS");
}
}
});
}
}
private void processCameraImage() {
//clear previous features
for (int i = 0; i < MR.MAX_FACES; ++i) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = 0;
trackingFeatures[i].features[j].y = 0;
}
}
cameraImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
int res = FSDK.LoadImageFromBuffer(cameraImage, pixelBuffer.array(), width, height, width * 4, cameraImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading camera image to FaceSDK: " + res);
return;
}
FSDK.MirrorImage(cameraImage, false);
int[] widthByReference = new int[1];
int[] heightByReference = new int[1];
FSDK.GetImageWidth(cameraImage, widthByReference);
FSDK.GetImageHeight(cameraImage, heightByReference);
int width = widthByReference[0];
int height = heightByReference[0];
int rotation = 0;
if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_PORTRAIT) {
rotation = 2;
} else if (deviceOrientation == DEVICE_ORIENTATION_LANDSCAPE) {
rotation = 3;
} else if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_LANDSCAPE) {
rotation = 1;
}
if (rotation > 0) {
FSDK.HImage rotated = new FSDK.HImage();
FSDK.CreateEmptyImage(rotated);
FSDK.RotateImage90(cameraImage, rotation, rotated);
FSDK.FeedFrame(tracker, 0, rotated, face_count, IDs);
FSDK.FreeImage(rotated);
} else {
FSDK.FeedFrame(tracker, 0, cameraImage, face_count, IDs);
}
for (int i = 0; i < (int) face_count[0]; ++i) {
FSDK.GetTrackerFacialFeatures(tracker, 0, IDs[i], trackingFeatures[i]);
if (rotation > 0) {
if (rotation == 1) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = height - 1 - x;
}
} else if (rotation == 2) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].y = height - 1 - trackingFeatures[i].features[j].y;
}
} else {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = x;
}
}
}
}
FSDK.FreeImage(cameraImage);
}
public void onSurfaceChanged(GL10 unused, int width, int height) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceChanged");
if (!isResizeCalled) {
isResizeCalled = true;
mainView.resizeForPerformance(width, height);
return;
}
GLES11.glViewport(0, 0, width, height);
Camera.Parameters param = camera.getParameters();
List<Camera.Size> psize = param.getSupportedPreviewSizes();
if (psize.size() > 0) {
int i = 0;
int optDistance = Integer.MAX_VALUE;
Log.d(TAG, "Choosing preview resolution closer to " + width + " x " + height);
double neededScale = height / (double) width;
for (int j = 0; j < psize.size(); ++j) {
double scale = psize.get(j).width / (double) psize.get(j).height;
int distance = (int) (10000 * Math.abs(scale - neededScale));
Log.d(TAG, "Choosing preview resolution, probing " + psize.get(j).width + " x " + psize.get(j).height + " distance: " + distance);
if (distance < optDistance) {
i = j;
optDistance = distance;
} else if (distance == optDistance) {
// try to avoid too low resolution
if ((psize.get(i).width < 300 || psize.get(i).height < 300)
&& psize.get(j).width > psize.get(i).width && psize.get(j).height > psize.get(i).height) {
i = j;
}
}
}
Log.d(TAG, "Using optimal preview size: " + psize.get(i).width + " x " + psize.get(i).height);
param.setPreviewSize(psize.get(i).width, psize.get(i).height);
// adjusting viewport to camera aspect ratio
int viewportHeight = (int) (width * (psize.get(i).width * 1.0f / psize.get(i).height));
GLES11.glViewport(0, 0, width, viewportHeight);
this.width = width;
this.height = viewportHeight;
pixelBuffer = ByteBuffer.allocateDirect(this.width * this.height * 4).order(ByteOrder.nativeOrder());
}
param.set("orientation", "landscape");
camera.setParameters(param);
camera.startPreview();
inPreview = true;
isResized = true;
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
private void initTex() {
textures = new int[3];
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
GLES11.glEnable(GL10.GL_TEXTURE_2D);
GLES11.glGenTextures(3, textures, 0);
GLES11.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_S, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_T, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MIN_FILTER, GLES11.GL_NEAREST);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MAG_FILTER, GLES11.GL_NEAREST);
}
private void deleteTex() {
GLES11.glDeleteTextures(3, textures, 0);
}
public synchronized void onFrameAvailable(SurfaceTexture st) {
updateSurfaceTexture = true;
mainView.requestRender();
}
public synchronized void snapshot() {
isTakingSnapshot.set(true);
}
}
Any type of help is appreciated... thank you so much.
Here is the preview that I see:
I have tried to make sense of this, but there is little information on which API level you're trying to target; that may also be the problem, or the problem may be the test device.
But let's rewrite the camera display code:
Method 1:
public void setCameraDisplayOrientation(android.hardware.Camera camera) {
Camera.Parameters parameters = camera.getParameters();
android.hardware.Camera.CameraInfo camInfo =
new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(getBackFacingCameraId(), camInfo);
Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
int rotation = display.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (camInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (camInfo.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (camInfo.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
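The snippet above calls a getBackFacingCameraId() helper that isn't shown. A minimal sketch of it, assuming the legacy android.hardware.Camera API:
// Hypothetical helper assumed by the snippet above: returns the ID of the
// first back-facing camera, or -1 if none is found.
private int getBackFacingCameraId() {
Camera.CameraInfo info = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
return i;
}
}
return -1;
}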
Call the setCameraDisplayOrientation() method in the onSurfaceCreated callback as follows:
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
camera = Camera.open();
setCameraDisplayOrientation(camera);
}
Method 2 is to try:
Get the phone's orientation by using a sensor; this makes life much easier, because the orientation will be handled properly.
You can find more about this in this link: Is Android's CameraInfo.orientation correctly documented? Incorrectly implemented?
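For illustration, a minimal sketch of the sensor approach, assuming the Activity holds a reference to the MainRenderer; the exact degree-to-constant mapping below is an assumption and may need tuning per device:
// Sketch: drive MainRenderer.deviceOrientation from the orientation sensor.
// `renderer` is assumed to be the MainRenderer instance owned by the Activity.
OrientationEventListener orientationListener =
new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) {
@Override
public void onOrientationChanged(int degrees) {
if (degrees == ORIENTATION_UNKNOWN) return;
if (degrees >= 315 || degrees < 45) {
renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_PORTRAIT;
} else if (degrees < 135) {
renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_INVERSE_LANDSCAPE;
} else if (degrees < 225) {
renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_INVERSE_PORTRAIT;
} else {
renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_LANDSCAPE;
}
}
};
if (orientationListener.canDetectOrientation()) orientationListener.enable();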
Method 3:
Edit your code as follows:
public class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final String TAG = "MainRenderer";
public static final int DEVICE_ORIENTATION_PORTRAIT = 0;
public static final int DEVICE_ORIENTATION_INVERSE_PORTRAIT = 1;
public static final int DEVICE_ORIENTATION_LANDSCAPE = 2;
public static final int DEVICE_ORIENTATION_INVERSE_LANDSCAPE = 3;
private Camera.CameraInfo cameraInfo;
public volatile int deviceOrientation = DEVICE_ORIENTATION_PORTRAIT;
private FSDK.HTracker tracker;
private int[] textures;
private Camera camera;
private SurfaceTexture surfaceTexture;
private boolean updateSurfaceTexture = false;
private FSDK.FSDK_Features[] trackingFeatures;
private MR.MaskFeatures maskCoords;
private int[] isMaskTexture1Created = new int[]{0};
private int[] isMaskTexture2Created = new int[]{0};
private int width;
private int height;
private ByteBuffer pixelBuffer;
private FSDK.HImage cameraImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE cameraImageMode = new FSDK.FSDK_IMAGEMODE();
private FSDK.HImage snapshotImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE snapshotImageMode = new FSDK.FSDK_IMAGEMODE();
private MainView mainView;
private MainActivity mainActivity;
private volatile boolean isResizeCalled = false;
private volatile boolean isResized = false;
public long IDs[] = new long[MR.MAX_FACES];
public long face_count[] = new long[1];
private long frameCount = 0;
private long startTime = 0;
private AtomicBoolean isTakingSnapshot = new AtomicBoolean(false);
public static final int[][] MASKS = new int[][]{
{R.raw.lips_pink, R.drawable.lips_pink, R.drawable.lips_pink_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_purple, R.drawable.lips_purple, R.drawable.lips_purple_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_red, R.drawable.lips_red, R.drawable.lips_red_normal, MR.SHIFT_TYPE_NO},
};
private int mask = 0;
private int maskLoaded = 0;
private volatile boolean isMaskChanged = false;
private boolean inPreview = false;
public void changeMask(int i) {
mask += i;
isMaskChanged = true;
}
public MainRenderer(MainView view) {
tracker = Application.tracker;
mainView = view;
mainActivity = (MainActivity) mainView.getContext();
trackingFeatures = new FSDK.FSDK_Features[MR.MAX_FACES];
for (int i = 0; i < MR.MAX_FACES; ++i) {
trackingFeatures[i] = new FSDK.FSDK_Features();
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j] = new FSDK.TPoint();
}
}
maskCoords = new MR.MaskFeatures();
}
public void close() {
updateSurfaceTexture = false;
surfaceTexture.release();
camera.stopPreview();
camera.release();
camera = null;
deleteTex();
}
public void startCamera() {
if (inPreview) {
camera.stopPreview();
inPreview = false;
}
//NB: if you don't release the current camera before switching, your app will crash
camera.release();
//swap the id of the camera to be used
//Below I have tried to change the camera facing
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_BACK;
} else {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_FRONT;
}
camera = Camera.open(cameraInfo.facing);
//Code snippet for this method from somewhere on Android Developers, I forget where
//setCameraDisplayOrientation(mainActivity, cameraInfo.facing, camera);
try {
//this step is critical, or the preview on the new camera will not know where to render to
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
camera.startPreview();
inPreview = true;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceCreated");
isResizeCalled = false;
isResized = false;
initTex();
loadMask(mask);
surfaceTexture = new SurfaceTexture(textures[0]);
surfaceTexture.setOnFrameAvailableListener(this);
// Find the ID of the camera
int cameraId = 0;
boolean frontCameraFound = false;
cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
}
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
frontCameraFound = true;
}
}
if (frontCameraFound) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
GLES11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); //background color
}
private byte[] readBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
int bufferSize = 16384;
byte[] buffer = new byte[bufferSize];
int len;
while ((len = inputStream.read(buffer)) != -1) {
byteBuffer.write(buffer, 0, len);
}
return byteBuffer.toByteArray();
}
// must be called from the thread with OpenGL context!
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public void loadMask(int maskNumber) {
GLES11.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
Log.d(TAG, "Loading mask...");
int[] mask = MASKS[maskNumber];
if (isMaskTexture1Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 1);
}
if (isMaskTexture2Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 2);
}
isMaskTexture1Created[0] = 0;
isMaskTexture2Created[0] = 0;
InputStream stream = mainView.getResources().openRawResource(mask[0]);
int res = MR.LoadMaskCoordsFromStream(stream, maskCoords);
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (res != FSDK.FSDKE_OK) {
Log.e(TAG, "Error loading mask coords from stream: " + res);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
return;
}
BitmapFactory.Options bitmapDecodingOptions = new BitmapFactory.Options();
bitmapDecodingOptions.inScaled = false; // to load original image without scaling
FSDK.HImage img1 = new FSDK.HImage();
if (mask[1] == -1) { // if no image
FSDK.CreateEmptyImage(img1);
} else {
stream = mainView.getResources().openRawResource(mask[1]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img1, data, data.length);
Log.d(TAG, "Load mask image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img1, w);
FSDK.GetImageHeight(img1, h);
Log.d(TAG, "Mask image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask image, using empty image");
FSDK.CreateEmptyImage(img1);
}
}
FSDK.HImage img2 = new FSDK.HImage();
if (mask[2] == -1) { // if no normal image
FSDK.CreateEmptyImage(img2);
} else {
stream = mainView.getResources().openRawResource(mask[2]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img2, data, data.length);
Log.d(TAG, "Load mask normal image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img2, w);
FSDK.GetImageHeight(img2, h);
Log.d(TAG, "Mask normal image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask normal image, using empty image");
FSDK.CreateEmptyImage(img2);
}
}
res = MR.LoadMask(img1, img2, textures[1], textures[2], isMaskTexture1Created, isMaskTexture2Created);
FSDK.FreeImage(img1);
FSDK.FreeImage(img2);
Log.d(TAG, "Mask loaded with result " + res + " texture1Created:" + isMaskTexture1Created[0] + " texture2Created:" + isMaskTexture2Created[0]);
Log.d(TAG, "Mask textures: " + textures[1] + " " + textures[2]);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
}
public void onDrawFrame(GL10 unused) { //call opengl functions only inside these functions!
GLES11.glClear(GLES11.GL_COLOR_BUFFER_BIT);
if (!isResized) {
return;
}
synchronized (this) {
if (updateSurfaceTexture) {
surfaceTexture.updateTexImage();
updateSurfaceTexture = false;
}
}
if (isMaskChanged) {
maskLoaded = mask;
loadMask(mask);
isMaskChanged = false;
}
int rotation = 1;
// First, drawing without mask to get image buffer
int res = MR.DrawGLScene(textures[0], 0, trackingFeatures, rotation, MR.SHIFT_TYPE_NO, textures[1], textures[2], maskCoords, 0, 0, width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the first MR.DrawGLScene call: " + res);
}
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
face_count[0] = 0;
processCameraImage();
// Second, drawing with mask atop of image
res = MR.DrawGLScene(textures[0], (int) face_count[0], trackingFeatures, rotation, MASKS[maskLoaded][3], textures[1], textures[2], maskCoords, isMaskTexture1Created[0], isMaskTexture2Created[0], width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the second MR.DrawGLScene call: " + res);
}
// Save snapshot if needed
if (isTakingSnapshot.compareAndSet(true, false)) {
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
snapshotImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
res = FSDK.LoadImageFromBuffer(snapshotImage, pixelBuffer.array(), width, height, width * 4, snapshotImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading snapshot image to FaceSDK: " + res);
} else {
FSDK.MirrorImage(snapshotImage, false);
String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath();
final String filename = galleryPath + "/MirrorRealityDemo" + System.currentTimeMillis() + ".png";
res = FSDK.SaveImageToFile(snapshotImage, filename);
Log.d(TAG, "saving snapshot to " + filename);
FSDK.FreeImage(snapshotImage);
if (FSDK.FSDKE_OK == res) {
mainActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(filename);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
mainActivity.sendBroadcast(mediaScanIntent);
Toast.makeText(mainActivity, "Saved successfully", Toast.LENGTH_SHORT).show();
}
});
}
}
}
// Show fps
++frameCount;
long timeCurrent = System.currentTimeMillis();
if (startTime == 0) startTime = timeCurrent;
long diff = timeCurrent - startTime;
if (diff >= 3000) {
final float fps = frameCount / (diff / 1000.0f);
frameCount = 0;
startTime = 0;
final TextView fpsTextView = mainActivity.fpsTextView();
mainActivity.fpsTextView().post(new Runnable() {
@Override
public void run() {
if (!mainActivity.isFinishing()) {
fpsTextView.setText(fps + " FPS");
}
}
});
}
}
private void processCameraImage() {
//clear previous features
for (int i = 0; i < MR.MAX_FACES; ++i) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = 0;
trackingFeatures[i].features[j].y = 0;
}
}
cameraImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
int res = FSDK.LoadImageFromBuffer(cameraImage, pixelBuffer.array(), width, height, width * 4, cameraImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading camera image to FaceSDK: " + res);
return;
}
FSDK.MirrorImage(cameraImage, false);
int[] widthByReference = new int[1];
int[] heightByReference = new int[1];
FSDK.GetImageWidth(cameraImage, widthByReference);
FSDK.GetImageHeight(cameraImage, heightByReference);
int width = widthByReference[0];
int height = heightByReference[0];
int rotation = 0;
if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_PORTRAIT) {
rotation = 2;
} else if (deviceOrientation == DEVICE_ORIENTATION_LANDSCAPE) {
rotation = 3;
} else if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_LANDSCAPE) {
rotation = 1;
}
if (rotation > 0) {
FSDK.HImage rotated = new FSDK.HImage();
FSDK.CreateEmptyImage(rotated);
FSDK.RotateImage90(cameraImage, rotation, rotated);
FSDK.FeedFrame(tracker, 0, rotated, face_count, IDs);
FSDK.FreeImage(rotated);
} else {
FSDK.FeedFrame(tracker, 0, cameraImage, face_count, IDs);
}
for (int i = 0; i < (int) face_count[0]; ++i) {
FSDK.GetTrackerFacialFeatures(tracker, 0, IDs[i], trackingFeatures[i]);
if (rotation > 0) {
if (rotation == 1) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = height - 1 - x;
}
} else if (rotation == 2) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].y = height - 1 - trackingFeatures[i].features[j].y;
}
} else {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = x;
}
}
}
}
FSDK.FreeImage(cameraImage);
}
public void onSurfaceChanged(GL10 unused, int width, int height) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceChanged");
if (!isResizeCalled) {
isResizeCalled = true;
mainView.resizeForPerformance(width, height);
return;
}
GLES11.glViewport(0, 0, width, height);
Camera.Parameters param = camera.getParameters();
List<Camera.Size> psize = param.getSupportedPreviewSizes();
if (psize.size() > 0) {
int i = 0;
int optDistance = Integer.MAX_VALUE;
Log.d(TAG, "Choosing preview resolution closer to " + width + " x " + height);
double neededScale = height / (double) width;
for (int j = 0; j < psize.size(); ++j) {
double scale = psize.get(j).width / (double) psize.get(j).height;
int distance = (int) (10000 * Math.abs(scale - neededScale));
Log.d(TAG, "Choosing preview resolution, probing " + psize.get(j).width + " x " + psize.get(j).height + " distance: " + distance);
if (distance < optDistance) {
i = j;
optDistance = distance;
} else if (distance == optDistance) {
// try to avoid too low resolution
if ((psize.get(i).width < 300 || psize.get(i).height < 300)
&& psize.get(j).width > psize.get(i).width && psize.get(j).height > psize.get(i).height) {
i = j;
}
}
}
Log.d(TAG, "Using optimal preview size: " + psize.get(i).width + " x " + psize.get(i).height);
param.setPreviewSize(psize.get(i).width, psize.get(i).height);
// adjusting viewport to camera aspect ratio
int viewportHeight = (int) (width * (psize.get(i).width * 1.0f / psize.get(i).height));
GLES11.glViewport(0, 0, width, viewportHeight);
this.width = width;
this.height = viewportHeight;
pixelBuffer = ByteBuffer.allocateDirect(this.width * this.height * 4).order(ByteOrder.nativeOrder());
}
param.set("orientation", "landscape");
camera.setParameters(param);
camera.startPreview();
inPreview = true;
isResized = true;
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
private void initTex() {
textures = new int[3];
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
GLES11.glEnable(GL10.GL_TEXTURE_2D);
GLES11.glGenTextures(3, textures, 0);
GLES11.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_S, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_T, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MIN_FILTER, GLES11.GL_NEAREST);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MAG_FILTER, GLES11.GL_NEAREST);
}
private void deleteTex() {
GLES11.glDeleteTextures(3, textures, 0);
}
public synchronized void onFrameAvailable(SurfaceTexture st) {
updateSurfaceTexture = true;
mainView.requestRender();
}
public synchronized void snapshot() {
isTakingSnapshot.set(true);
}
}
This is sometimes due to Android not knowing which orientation the previous activity is coming from. Try setting the orientation of the application with
android:screenOrientation="nosensor"
or try the following:
Add the orientation attribute in the manifest
android:screenOrientation=["unspecified" | "behind" |
"landscape" | "portrait" |
"reverseLandscape" | "reversePortrait" |
"sensorLandscape" | "sensorPortrait" |
"userLandscape" | "userPortrait" |
"sensor" | "fullSensor" | "nosensor" |
"user" | "fullUser" | "locked"]
So in your case it will be
<activity android:name=".YourCameraActivity"
....
android:screenOrientation="sensorPortrait"/>
Related
I'm currently trying to make a 2D game using VSCode on a MacBook, and I keep getting this error whenever I try to load the map. For some reason my map appears all white, without the character, and I get this error in the terminal:
Exception in thread "AWT-EventQueue-0" java.lang.ArrayIndexOutOfBoundsException: Index 12 out of bounds for length 12
at tile.TileManager.draw(TileManager.java:102)
at main.GamePanel.paintComponent(GamePanel.java:94)
Here is my TileManager class:
package tile;
import main.GamePanel;
import java.io.IOException;
import javax.imageio.ImageIO;
import java.awt.Graphics2D;
import java.io.InputStream;
import java.io.*;
public class TileManager {
GamePanel gp;
Tile[] tile;
int mapTileNum[][];
public TileManager(GamePanel gp) {
this.gp = gp;
tile = new Tile[10];
mapTileNum = new int[gp.maxWorldCol][gp.maxScreenRow];
getTileImage();
loadMap("res/maps/world01.txt");
}
public void getTileImage() {
System.out.println("image loading started");
try {
this.tile[0] = new Tile();
this.tile[0].image = ImageIO.read(new FileInputStream("res/tiles/grass01.png"));
this.tile[1] = new Tile();
this.tile[1].image = ImageIO.read(new FileInputStream("res/tiles/wall.png"));
this.tile[2] = new Tile();
this.tile[2].image = ImageIO.read(new FileInputStream("res/tiles/water01.png"));
this.tile[3] = new Tile();
this.tile[3].image = ImageIO.read(new FileInputStream("res/tiles/earth.png"));
this.tile[4] = new Tile();
this.tile[4].image = ImageIO.read(new FileInputStream("res/tiles/tree.png"));
this.tile[5] = new Tile();
this.tile[5].image = ImageIO.read(new FileInputStream("res/tiles/sand.png"));
} catch (IOException e) {
e.printStackTrace();
}
System.out.println("Image loading finished");
}
public void loadMap(String filePath) {
try {
InputStream is = getClass().getResourceAsStream(filePath);
BufferedReader br = new BufferedReader(new InputStreamReader(is));
int col = 0;
int row = 0;
while (col < gp.maxWorldCol && row < gp.maxWorldRow) {
String line = br.readLine();
while (col < gp.maxWorldCol) {
String numbers[] = line.split(" "); // splits up strings at space
int num = Integer.parseInt(numbers[col]);
mapTileNum[col][row] = num;
col++;
}
if (col == gp.maxWorldCol) {
col = 0;
row++;
}
}
br.close();
} catch (Exception e) {
}
}
public void draw(Graphics2D g2) {
int worldCol = 0;
int worldRow = 0;
while (worldCol < gp.maxWorldCol && worldRow < gp.maxWorldRow) {
int tileNum = mapTileNum[worldCol][worldRow];
int worldX = worldCol * gp.tileSize;
int worldY = worldRow * gp.tileSize;
int screenX = worldX - gp.player.worldX + gp.player.screenX;
int screenY = worldY - gp.player.worldY + gp.player.screenY;
if (worldX + gp.tileSize > gp.player.worldX - gp.player.screenX &&
worldX - gp.tileSize < gp.player.worldX + gp.player.screenX &&
worldY + gp.tileSize > gp.player.worldY - gp.player.screenY &&
worldY - gp.tileSize < gp.player.worldY + gp.player.screenY) {
g2.drawImage(tile[tileNum].image, screenX, screenY, gp.tileSize, gp.tileSize, null);
}
g2.drawImage(tile[tileNum].image, screenX, screenY, gp.tileSize, gp.tileSize, null);
worldCol++;
if (worldCol == gp.maxWorldCol) {
worldCol = 0;
worldRow++;
}
}
}
}
In the TileManager constructor, call the getTileImage() method to initialize the tiles:
public TileManager(GamePanel gp) {
this.gp = gp;
tile = new Tile[10];
getTileImage(); // Call the getTileImage method to initialize the tiles
}
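Note also that the constructor in the question allocates mapTileNum with gp.maxScreenRow, while loadMap() and draw() index rows up to gp.maxWorldRow; if those two constants differ, that alone produces the out-of-bounds access. A sketch of the corrected allocation:
// Size the map with world rows, matching how it is indexed later
// (mapTileNum[col][row] with row < gp.maxWorldRow).
mapTileNum = new int[gp.maxWorldCol][gp.maxWorldRow];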
I was using Processing in NetBeans to play a movie on an array of LED strips, and I am using OPC.class for LED-strip Fadecandy mapping. This code works in the Processing sketch, but when I tried to use it in NetBeans, the loadPixels() call in the draw() method of OPC.java throws a NullPointerException.
Stacktrace:
Exception in thread "Animation Thread" java.lang.NullPointerException
at processing.core.PApplet.loadPixels(PApplet.java:10625)
at com.processing.OPC.draw(OPC.java:139)
at com.processing.Video.draw(Video.java:62)
at processing.core.PApplet.handleDraw(PApplet.java:2402)
at processing.awt.PSurfaceAWT$12.callDraw(PSurfaceAWT.java:1527)
at processing.core.PSurfaceNone$AnimationThread.run(PSurfaceNone.java:316)
Video.java
public class Video extends PApplet
{
OPC opc;
Movie movie;
public static void main(String args[])
{
PApplet.main(new String[] { "--present", "com.processing.Video" });
}
public void settings()
{
size(600, 240);
}
public void setup()
{
opc = new OPC(this, "192.168.15.10", 7890);
for(int i=0; i<24; i++) {
opc.ledStrip(i * 60, 60,
300, i * 240 / 24 + 240 / 48, 240 / 24, PI, false);
}
movie = new Movie(this, "waffle.mp4");
movie.loop();
}
public void movieEvent(Movie m)
{
m.read();
}
@Override
public void draw()
{
if (movie.available() == true) {
movie.read();
}
image(movie, 0, 0, width, height);
}
}
OPC.java
public class OPC extends PApplet implements Runnable
{
Thread thread;
Socket socket;
OutputStream output, pending;
String host;
int port;
int height = 240;
int width = 600;
int[] pixelLocations;
byte[] packetData;
byte firmwareConfig;
String colorCorrection;
boolean enableShowLocations;
PApplet parent;
OPC(PApplet parent, String host, int port)
{
this.host = host;
this.port = port;
thread = new Thread(this);
thread.start();
this.enableShowLocations = true;
registerMethod("draw", this);
}
public void led(int index, int x, int y){
if (pixelLocations == null) {
pixelLocations = new int[index + 1];
} else if (index >= pixelLocations.length) {
pixelLocations = Arrays.copyOf(pixelLocations, index + 1);
}
pixelLocations[index] = x + 600 * y;
}
public void ledStrip(int index, int count, float x, float y, float spacing, float angle, boolean reversed)
{
float s = sin(angle);
float c = cos(angle);
for (int i = 0; i < count; i++) {
led(reversed ? (index + count - 1 - i) : (index + i),
(int)(x + (i - (count-1)/2.0) * spacing * c + 0.5),
(int)(y + (i - (count-1)/2.0) * spacing * s + 0.5));
}
}
void showLocations(boolean enabled)
{
enableShowLocations = enabled;
}
void setColorCorrection(String s)
{
colorCorrection = s;
sendColorCorrectionPacket();
}
void sendFirmwareConfigPacket()
{
if (pending == null) {
return;
}
byte[] packet = new byte[9];
packet[0] = (byte)0x00; // Channel (reserved)
packet[1] = (byte)0xFF; // Command (System Exclusive)
packet[2] = (byte)0x00; // Length high byte
packet[3] = (byte)0x05; // Length low byte
packet[4] = (byte)0x00; // System ID high byte
packet[5] = (byte)0x01; // System ID low byte
packet[6] = (byte)0x00; // Command ID high byte
packet[7] = (byte)0x02; // Command ID low byte
packet[8] = (byte)firmwareConfig;
try {
pending.write(packet);
} catch (Exception e) {
dispose();
}
}
void sendColorCorrectionPacket()
{
if (colorCorrection == null) {
return;
}
if (pending == null) {
return;
}
byte[] content = colorCorrection.getBytes();
int packetLen = content.length + 4;
byte[] header = new byte[8];
header[0] = (byte)0x00; // Channel (reserved)
header[1] = (byte)0xFF; // Command (System Exclusive)
header[2] = (byte)(packetLen >> 8); // Length high byte
header[3] = (byte)(packetLen & 0xFF); // Length low byte
header[4] = (byte)0x00; // System ID high byte
header[5] = (byte)0x01; // System ID low byte
header[6] = (byte)0x00; // Command ID high byte
header[7] = (byte)0x01; // Command ID low byte
try {
pending.write(header);
pending.write(content);
} catch (Exception e) {
dispose();
}
}
public void draw()
{
if (pixelLocations == null) {
return;
}
if (output == null) {
return;
}
int numPixels = pixelLocations.length;
int ledAddress = 4;
setPixelCount(numPixels);
println("pixel loading");
loadPixels();
println("pixel loaded123");
for (int i = 0; i < numPixels; i++) {
int pixelLocation = pixelLocations[i];
int pixel = pixels[pixelLocation];
packetData[ledAddress] = (byte)(pixel >> 16);
packetData[ledAddress + 1] = (byte)(pixel >> 8);
packetData[ledAddress + 2] = (byte)pixel;
ledAddress += 3;
if (true) {
pixels[pixelLocation] = 0xFFFFFF ^ pixel;
}
}
writePixels();
if (enableShowLocations) {
updatePixels();
print("a");
}
}
void setPixelCount(int numPixels)
{
int numBytes = 3 * numPixels;
int packetLen = 4 + numBytes;
if (packetData == null || packetData.length != packetLen) {
// Set up our packet buffer
packetData = new byte[packetLen];
packetData[0] = (byte)0x00;
packetData[1] = (byte)0x00;
packetData[2] = (byte)(numBytes >> 8);
packetData[3] = (byte)(numBytes & 0xFF);
}
}
void setPixel(int number, int c)
{
println("set");
int offset = 4 + number * 3;
if (packetData == null || packetData.length < offset + 3) {
setPixelCount(number + 1);
}
packetData[offset] = (byte) (c >> 16);
packetData[offset + 1] = (byte) (c >> 8);
packetData[offset + 2] = (byte) c;
}
int getPixel(int number)
{
println("get");
int offset = 4 + number * 3;
if (packetData == null || packetData.length < offset + 3) {
return 0;
}
return (packetData[offset] << 16) | (packetData[offset + 1] << 8) | packetData[offset + 2];
}
void writePixels()
{
println("write");
if (packetData == null || packetData.length == 0) {
return;
}
if (output == null) {
return;
}
try {
output.write(packetData);
} catch (Exception e) {
dispose();
}
}
public void dispose()
{
if (output != null) {
println("Disconnected from OPC server");
}
socket = null;
output = pending = null;
}
public void run()
{
println("?");
if(output == null) { // No OPC connection?
try { // Make one!
socket = new Socket(host, port);
socket.setTcpNoDelay(true);
pending = socket.getOutputStream();
println("Connected to OPC server");
sendColorCorrectionPacket();
sendFirmwareConfigPacket();
output = pending;
} catch (ConnectException e) {
dispose();
} catch (IOException e) {
dispose();
}
}
try {
Thread.sleep(500);
}
catch(InterruptedException e) {
}
}
}
You should only have one class that extends PApplet.
Think of that class as your "main" class. Any other classes that need to use Processing functions will need an instance of that main class in order to access Processing functions. You can pass it in using the this keyword.
In fact, you're already doing that- notice that you pass this into your OPC class, and then store it in the parent parameter. You just never do anything with that parameter.
So step 1 is to remove the extends PApplet from your OPC class:
public class OPC implements Runnable
This will cause some compilation errors. That's okay, we'll fix them later.
Step 2 is to actually store the PApplet parent parameter in a class-level variable.
OPC(PApplet parent, String host, int port){
this.parent = parent;
//rest of constructor
}
Now that you have that, step 3 is to fix any compilation errors by using the parent variable to access Processing functions.
parent.println("pixel loading");
parent.loadPixels();
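The same applies to the pixel buffer itself. For instance, the loop in draw() would go through the parent sketch (a sketch based on the original loop body):
// Read and write the parent sketch's pixel buffer, not the inherited one.
int pixel = parent.pixels[pixelLocation];
parent.pixels[pixelLocation] = 0xFFFFFF ^ pixel; // invert, as the original loop does
parent.updatePixels();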
More info can be found in the Processing in Eclipse tutorial. It's for Eclipse, but the same principles apply in NetBeans.
I am trying to detect faces and crop the face part into a rectangular image. I have done the face detection part, but I still can't find any help on how to crop the face part. Please have a look at my code!
public class FaceDetect extends Activity {
private MyImageView mIV;
private Bitmap mFaceBitmap;
private int mFaceWidth = 200;
private int mFaceHeight = 200;
int cropXinit = 0;
int cropYint = 0;
int cropXend = 0;
int cropYend = 0;
Bitmap cropedBitmap;
Bitmap b;
private static final int MAX_FACES = 1;
private static String TAG = "FaceDetect";
private static boolean DEBUG = false;
protected static final int GUIUPDATE_SETFACE = 999;
protected Handler mHandler = new Handler() {
// @Override
public void handleMessage(Message msg) {
mIV.invalidate();
super.handleMessage(msg);
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mIV = new MyImageView(this);
setContentView(mIV, new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT));
// load the photo
b = ChooseActivity.bitmap;
mFaceBitmap = b.copy(Bitmap.Config.RGB_565, true);
b.recycle();
mFaceWidth = mFaceBitmap.getWidth();
mFaceHeight = mFaceBitmap.getHeight();
mIV.setImageBitmap(mFaceBitmap);
mIV.invalidate();
setFace();
}
public void setFace() {
FaceDetector fd;
FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
PointF eyescenter = new PointF();
float eyesdist = 0.0f;
int[] fpx = null;
int[] fpy = null;
int count = 0;
try {
fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
count = fd.findFaces(mFaceBitmap, faces);
} catch (Exception e) {
Log.e(TAG, "setFace(): " + e.toString());
return;
}
// check if we detect any faces
if (count > 0) {
fpx = new int[count * 2];
fpy = new int[count * 2];
for (int i = 0; i < count; i++) {
try {
faces[i].getMidPoint(eyescenter);
eyesdist = faces[i].eyesDistance();
// set up left eye location
fpx[2 * i] = (int) (eyescenter.x - eyesdist / 2);
fpy[2 * i] = (int) eyescenter.y;
// set up right eye location
fpx[2 * i + 1] = (int) (eyescenter.x + eyesdist / 2);
fpy[2 * i + 1] = (int) eyescenter.y;
if (DEBUG)
Log.e(TAG,
"setFace(): face "
+ i
+ ": confidence = "
+ faces[i].confidence()
+ ", eyes distance = "
+ faces[i].eyesDistance()
+ ", pose = ("
+ faces[i]
.pose(FaceDetector.Face.EULER_X)
+ ","
+ faces[i]
.pose(FaceDetector.Face.EULER_Y)
+ ","
+ faces[i]
.pose(FaceDetector.Face.EULER_Z)
+ ")" + ", eyes midpoint = ("
+ eyescenter.x + "," + eyescenter.y
+ ")");
} catch (Exception e) {
Log.e(TAG, "setFace(): face " + i + ": " + e.toString());
}
}
}
mIV.setDisplayPoints(fpx, fpy, count * 2, 1);
// if(eyescenter.x -eyesdist >= 0)
// {
// cropXinit = (int) (eyescenter.x -eyesdist) ;
// }
// else
// {
// cropXinit = 0;
// }
// if(eyescenter.x +eyesdist <= mFaceWidth)
// {
// cropXend = (int) (eyescenter.x +eyesdist) ;
// }
// else
// {
// cropXend = mFaceWidth;
// }
// if(eyescenter.y +eyesdist*2 <= mFaceHeight)
// {
// cropYend = (int) (eyescenter.y +eyesdist*2) ;
// }
// else
// {
// cropYend = mFaceHeight;
// }
// if(eyescenter.y -eyesdist >= 0)
// {
// cropYint = (int) (eyescenter.y -eyesdist) ;
// }
// else
// {
// cropYint = 0;
// }
// mIV.setImageBitmap(Bitmap.createBitmap(mFaceBitmap,cropXinit,cropYint,cropXend,cropYend));
}
}
createBitmap(Bitmap source, int x, int y, int width, int height) receives a start X and start Y, and a width and height value, not an end X and end Y. If you change your commented-out code to this it should work:
mIV.setImageBitmap(Bitmap.createBitmap(mFaceBitmap, cropXinit, cropYint, cropXend - cropXinit, cropYend - cropYint));
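Equivalently, you can compute the width and height up front and clamp the rectangle to the bitmap bounds, as the commented-out checks intend. A sketch using the question's variables (eyescenter and eyesdist are the locals from setFace()):
// Clamp the crop rectangle to the bitmap, then crop around the eyes.
int x = Math.max(0, (int) (eyescenter.x - eyesdist));
int y = Math.max(0, (int) (eyescenter.y - eyesdist));
int w = Math.min(mFaceWidth, (int) (eyescenter.x + eyesdist)) - x;
int h = Math.min(mFaceHeight, (int) (eyescenter.y + eyesdist * 2)) - y;
mIV.setImageBitmap(Bitmap.createBitmap(mFaceBitmap, x, y, w, h));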
I've already read a lot of posts and still can't solve it.
I have an app with a problem related to OOM. It shows a picture stored on a server (PNG, approximately 1.33 MB, 3072*1728 pixels). My system heap size is 125 MB,
obtained by: long maxMemory = rt.maxMemory();
I start a new thread (AsyncTask) to decode it, using a new handler to manage it.
I also downscale the picture when it is too big. The image shows fine on the local device.
But it still causes "Grow heap (frag case) to 10.389MB for 2654224-byte allocation",
and it takes 2~10 seconds to collect the garbage.
It's a bad user experience. Did anyone meet the same problem? Any suggestions?
public class DownloadImageTask extends AsyncTask<String, Void, Bitmap> {
public static String TAG = "DownloadImageTask";
public enum DOWNLOAD_STATE {
eIdle, eProgressing, eError, eSuccess,
}
private DOWNLOAD_STATE state = DOWNLOAD_STATE.eIdle;
private Bitmap dlBitmap;
public DownloadImageTask(String imageUrl) {
execute(imageUrl);
}
public DOWNLOAD_STATE getDownloadState() {
return state;
}
public Bitmap getDownloadBitmap() {
return dlBitmap;
}
public Drawable getDownloadDrawable() {
@SuppressWarnings("deprecation")
Drawable dlDrawable = new BitmapDrawable(dlBitmap);
return dlDrawable;
}
protected Bitmap doInBackground(String... urls) {
String urldisplay = urls[0];
try {
dlBitmap = getBitmap(urldisplay);
if (dlBitmap == null)
state = DOWNLOAD_STATE.eError;
else
state = DOWNLOAD_STATE.eSuccess;
} catch (Exception e) {
dlBitmap = null;
state = DOWNLOAD_STATE.eError;
}
return dlBitmap;
}
protected void onPostExecute(Bitmap result) {
result = null;
}
public Bitmap getBitmap(String urldisplay) {
try {
Log.d(TAG,"getBitmap File");
InputStream in = new java.net.URL(urldisplay).openStream();
if(in == null)
Log.d(TAG,"in null");
// first decode, get the length & width of picture,didn't load the picture into memory
BitmapFactory.Options opts = new BitmapFactory.Options();
opts.inJustDecodeBounds = true;
BitmapFactory.decodeStream(in, null, opts);
//in.close();
//compute the SampleSize (power of 2 is great)
int sampleSize = computeSampleSize(opts, -1, 1920 * 1080);//monitor limitation
Log.d(TAG,"samplesize ="+sampleSize);
// second decode,set sample size , generate thumbnail
in = new java.net.URL(urldisplay).openStream();
opts = new BitmapFactory.Options();
opts.inPreferredConfig = Bitmap.Config.RGB_565;
opts.inSampleSize = sampleSize;
opts.inInputShareable = true;
opts.inPurgeable = true;
Bitmap bmp = BitmapFactory.decodeStream(in, null, opts);
in.close();
return bmp;
} catch (Exception err) {
Log.e(TAG, "error: " + err.toString());
return null;
}
}
public static int computeSampleSize(BitmapFactory.Options options,
int minSideLength, int maxNumOfPixels) {
Log.d(TAG,"computeSampleSize");
int initialSize = computeInitialSampleSize(options, minSideLength,
maxNumOfPixels);
int roundedSize;
if (initialSize <= 8) {
roundedSize = 1;
while (roundedSize < initialSize) {
roundedSize <<= 1;
}
} else {
roundedSize = (initialSize + 7) / 8 * 8;
}
return roundedSize;
}
private static int computeInitialSampleSize(BitmapFactory.Options options,
int minSideLength, int maxNumOfPixels) {
Log.d(TAG,"computeInitialSampleSize");
try{
double w = options.outWidth;
double h = options.outHeight;
int lowerBound = (maxNumOfPixels == -1) ? 1 : (int) Math.ceil(Math
.sqrt(w * h / maxNumOfPixels));
int upperBound = (minSideLength == -1) ? 128 : (int) Math.min(
Math.floor(w / minSideLength), Math.floor(h / minSideLength));
if (upperBound < lowerBound) {
// return the larger one when there is no overlapping zone.
return lowerBound;
}
if ((maxNumOfPixels == -1) && (minSideLength == -1)) {
return 1;
} else if (minSideLength == -1) {
return lowerBound;
} else {
return upperBound;
}
}catch (Exception e){
Log.d(TAG,"computeInitialSampleSize err:"+e.toString());
return 1;
}
}
}
And show the picture in an ImageSwitcher:
myImageSwitcher.setImageDrawable(task.getDownloadDrawable());
I have huge .csv files (the biggest one is 72 MB) that I need to load into my Android app. They contain matrices, e.g. [7500x1000], which I later multiply with another one. Because they are too big to read at one time, I'm reading line after line and performing the multiplication. I tested this on a Nexus 4 and it took about 15 minutes to do everything. When I copied the code from the Android project to a Java one (the only difference is that on Android I'm using Bitmap and in Java BufferedImage) and ran it as a Java application, it took a few seconds. Do you have any idea why that is and how to shorten this time? Here is my code:
Android:
public class NetworkThread extends Thread {
private static boolean threadIsWorking = false;
private Context context;
private SQLiteHandler sql;
private static NetworkThread REFERENCE = null;
private String w1 = "w1.csv";
private String w2 = "w2.csv";
private String w3 = "w3.csv";
private String w4 = "w4.csv";
String NEURON_PATH = "/data/data/com.ZPIProject/neuronFiles/";
public static NetworkThread getInstance(Context context, SQLiteHandler sql) {
if (REFERENCE == null)
REFERENCE = new NetworkThread(context, sql);
return REFERENCE;
}
private NetworkThread(Context context, SQLiteHandler sql) {
this.context = context;
this.sql = sql;
}
@Override
public void start() {
if (!threadIsWorking) {
threadIsWorking = true;
try {
Log.i("MATRIX", "THREAD ID: " + Thread.currentThread().getId());
Bitmap bit = BitmapFactory.decodeResource(context.getResources(), R.drawable.aparat_small);
double[] imageInfo = ImageUtils.getImageInfo(bit);
copyNeuronWeightsToMemoryOnPhone();
double[] m4 = multiplyImageWithNeuronWeights(imageInfo);
List<WeightWithId> best10 = findBest10Results(m4);
for (int i = 0; i < best10.size(); i++) {
Log.e("IDS", best10.get(i).getId() + "");
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
private List<WeightWithId> findBest10Results(double[] m4) throws SQLException, CloneNotSupportedException {
List<WeightWithId> best10 = new ArrayList<WeightWithId>(20);
for (int j = 0; j < 19; j++) {
Map<Integer, double[]> readDescriptions = sql.getDescriptionForShoeId(j * 100, j * 100 + 100);
List<WeightWithId> distances = new ArrayList<WeightWithId>(100);
for (Integer i : readDescriptions.keySet()) {
double dist = dist(m4, readDescriptions.get(i));
distances.add(new WeightWithId(i, dist));
}
Collections.sort(distances);
for (int i = 0; i < 10; i++) {
best10.add(distances.get(i).clone());
}
Collections.sort(best10);
if (best10.size() > 10) {
for (int i = 10; i < best10.size(); i++) {
best10.remove(i);
}
}
}
return best10;
}
private double[] multiplyImageWithNeuronWeights(double[] imageInfo) throws IOException {
Log.i("MATRIX MULTIPLY", "M1");
double[] m1 = MatrixMaker.multiplyMatrixWithCsvMatrix(NEURON_PATH + w1, imageInfo, 1000, false);
Log.i("MATRIX MULTIPLY", "M2");
double[] m2 = MatrixMaker.multiplyMatrixWithCsvMatrix(NEURON_PATH + w2, m1, 500, false);
Log.i("MATRIX MULTIPLY", "M3");
double[] m3 = MatrixMaker.multiplyMatrixWithCsvMatrix(NEURON_PATH + w3, m2, 250, false);
Log.i("MATRIX MULTIPLY", "M4");
return MatrixMaker.multiplyMatrixWithCsvMatrix(NEURON_PATH + w4, m3, 50, true);
}
private void copyNeuronWeightsToMemoryOnPhone() throws IOException {
Log.i("MATRIX COPY", "W1");
CopyUtils.copyFileIntoDevice(NEURON_PATH, w1, context);
Log.i("MATRIX COPY", "W2");
CopyUtils.copyFileIntoDevice(NEURON_PATH, w2, context);
Log.i("MATRIX COPY", "W3");
CopyUtils.copyFileIntoDevice(NEURON_PATH, w3, context);
Log.i("MATRIX COPY", "W4");
CopyUtils.copyFileIntoDevice(NEURON_PATH, w4, context);
}
private double dist(double[] a, double[] b) {
double result = 0;
for (int i = 0; i < a.length; i++)
result += (a[i] - b[i]) * (a[i] - b[i]);
return result;
}
}
Matrix Operations:
public class MatrixMaker {
public static double[] multiplyMatrixWithCsvMatrix(String csvFilePath, double[] imageInfo, int expectedSize, boolean lastOne) throws IOException {
InputStream inputStream = new FileInputStream(new File(csvFilePath));
InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
String line;
//counter - the position in the result array to which the calculated value should be set
int counter = 0;
//result array
double[] multipliedImageInfoWithCsvMatrix = new double[expectedSize + 1];
//declaration of array for read line (parsed into double array)
double[] singleLineFromCsv = new double[imageInfo.length];
while ((line = bufferedReader.readLine()) != null) {
//splitting and parsing values from read line
String[] splitted = line.split(",");
for (int i = 0; i < splitted.length; i++) {
singleLineFromCsv[i] = Double.valueOf(splitted[i]);
}
//multiply imageInfo array with single row from csv file
multipliedImageInfoWithCsvMatrix[counter] = multiplyOneRow(imageInfo, singleLineFromCsv);
//ugly flag 'lastOne' needed for another business case
if (!lastOne) {
multipliedImageInfoWithCsvMatrix[counter] = 1d / (1 + Math.exp(-multipliedImageInfoWithCsvMatrix[counter]));
}
counter++;
//logging progress
if (counter % 100 == 0)
Log.i("MULTIPLY PROGRESS", counter + " " + System.currentTimeMillis());
}
if (!lastOne)
multipliedImageInfoWithCsvMatrix[expectedSize] = 1d;
bufferedReader.close();
inputStream.close();
return multipliedImageInfoWithCsvMatrix;
}
private static double multiplyOneRow(double first[], double second[]) {
double result = 0d;
for (int i = 0; i < first.length; i++) {
result += first[i] * second[i];
}
return result;
}
}
onCreate in one of the Activities:
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
try {
final SQLiteHandler sql = new SQLiteHandler(this);
sql.init();
NetworkThread.getInstance(this, sql).start();
} catch (IOException e) {
e.printStackTrace();
} catch (SQLException e) {
e.printStackTrace();
}
JAVA:
Multiply, the equivalent of MatrixMaker:
import java.io.*;
public class Multiply {
public static double[] multiplyMatrixWithCsvMatrix(String csvFilePath, double[] imageInfo,
int expectedSize, boolean lastOne) throws IOException {
InputStream inputStream = new FileInputStream(new File(csvFilePath));
InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
String line;
// counter - the position in the result array to which the calculated value should be set
int counter = 0;
// result array
double[] multipliedImageInfoWithCsvMatrix = new double[expectedSize + 1];
// declaration of array for read line (parsed into double array)
double[] singleLineFromCsv = new double[imageInfo.length];
while ((line = bufferedReader.readLine()) != null) {
// splitting and parsing values from read line
String[] splitted = line.split(",");
for (int i = 0; i < splitted.length; i++) {
singleLineFromCsv[i] = Double.valueOf(splitted[i]);
}
// multiply imageInfo array with single row from csv file
multipliedImageInfoWithCsvMatrix[counter] = multiplyOneRow(imageInfo, singleLineFromCsv);
// ugly flag 'lastOne' needed for another business case
if (!lastOne) {
multipliedImageInfoWithCsvMatrix[counter] = 1d / (1 + Math
.exp(-multipliedImageInfoWithCsvMatrix[counter]));
}
counter++;
// logging progress
if (counter % 100 == 0)
System.out.println(counter + " " + System.currentTimeMillis());
}
if (!lastOne)
multipliedImageInfoWithCsvMatrix[expectedSize] = 1d;
bufferedReader.close();
inputStream.close();
return multipliedImageInfoWithCsvMatrix;
}
private static double multiplyOneRow(double first[], double second[]) {
double result = 0d;
for (int i = 0; i < first.length; i++) {
result += first[i] * second[i];
}
return result;
}
}
Executable class
public class MRunner {
private static final int RED_INDEX = 0;
private static final int GREEN_INDEX = 2500;
private static final int BLUE_INDEX = 5000;
private static final String w1 = "w1.csv";
private static final String NEURON_PATH = "C:\\Users\\Vortim\\Desktop\\";
public static void main(String[] args) {
new Thread(new Runnable() {
@Override
public void run() {
try {
Multiply.multiplyMatrixWithCsvMatrix(NEURON_PATH + w1, getImageInfo(ImageIO
.read(new File("C:\\Users\\Vortim\\git\\MatrixMaker\\but.png"))), 1000,
false);
} catch (IOException e) {
e.printStackTrace();
}
}
}).start();
}
public static double[] getImageInfo(BufferedImage source) {
double[] result = new double[7501];
int width = source.getWidth();
int height = source.getHeight();
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
if (x == 0) {
result[y + RED_INDEX] = 255d;
result[y + GREEN_INDEX] = 255d;
result[y + BLUE_INDEX] = 255d;
} else if (y == 0) {
result[x * 50 + RED_INDEX] = 255d;
result[x * 50 + GREEN_INDEX] = 255d;
result[x * 50 + BLUE_INDEX] = 255d;
} else {
int pixel = source.getRGB(x, y);
double r = (pixel) >> 16 & 0xff;
double g = (pixel) >> 8 & 0xff;
double b = (pixel) & 0xff;
result[x * y + RED_INDEX] = 1d - (r / 255d);
result[x * y + GREEN_INDEX] = 1d - (g / 255d);
result[x * y + BLUE_INDEX] = 1d - (b / 255d);
}
}
}
result[7500] = 1;
return result;
}
}