When using setBitmap through WallpaperManager, i.e., wallpaperManager.setBitmap(wallpaperBitmap);, I get a black screen when the new wallpaper is set. How can I remove the black screen during the transition from the old wallpaper to the new one?
Where is this black screen coming from in AOSP 10?
Below is the code snippet:
setUserWallPaper(this, "test");
private void setUserWallPaper(final Context context, final String userName) {
setUserWallpaperRunnable =
new Runnable() {
@Override
public void run() {
Log.i(TAG, "setUserWallPaper() run");
InputStream in = null;
OutputStream out = null;
try {
in = new BufferedInputStream(
context.getContentResolver()
.openInputStream(
Uri.parse(
"content://test.app.personalization.provider/" +
"wall_paper" +
"?user=" +
userName +
"&encryption=false"))
);
Bitmap wallpaperBitmap = BitmapFactory.decodeStream(in);
if(wallpaperBitmap != null){
WallpaperManager wallpaperManager = WallpaperManager.getInstance(context);
wallpaperManager.setBitmap(wallpaperBitmap);
} else {
Log.i(TAG, "user wallpaper is null.");
}
Log.i(TAG, "setUserWallPaper() end");
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (in != null) {
in.close();
}
if(out != null) {
out.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
};
sUserCustomizeWorker.post(setUserWallpaperRunnable);
}
I know this bug.
Problem: the bitmap is too big for the wallpaper, so you have to rescale it.
Rule: bitmap width < device width * 2 and bitmap height < device height * 2.
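A minimal sketch of that rescaling (the helper name and the hard 2x limit are assumptions, not taken from your code); call it right before setBitmap():
private Bitmap scaleForWallpaper(Context context, Bitmap source) {
    DisplayMetrics metrics = context.getResources().getDisplayMetrics();
    int maxWidth = metrics.widthPixels * 2;
    int maxHeight = metrics.heightPixels * 2;
    if (source.getWidth() <= maxWidth && source.getHeight() <= maxHeight) {
        return source; // already small enough
    }
    // shrink uniformly so both dimensions fit under the limit
    float scale = Math.min((float) maxWidth / source.getWidth(),
            (float) maxHeight / source.getHeight());
    return Bitmap.createScaledBitmap(source,
            Math.round(source.getWidth() * scale),
            Math.round(source.getHeight() * scale), true);
}
Then in your runnable: wallpaperManager.setBitmap(scaleForWallpaper(context, wallpaperBitmap));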
Related
I have built a custom Android camera in this way:
//CameraActivity.java
public class CameraActivity extends AppCompatActivity implements CameraPreview.OnCameraStatusListener {
String path, TAG = "CameraActivity";
private CameraPreview cameraPreview;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_camera);
Button takePhoto = findViewById(R.id.takePhoto);
takePhoto.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Log.e(TAG, "===takePhoto()===");
if (cameraPreview != null) {
cameraPreview.takePicture();
}
}
});
//Start camera
initCameraPreview();
}
private void initCameraPreview() {
Log.e(TAG, "===initCameraPreview()===");
cameraPreview = findViewById(R.id.cameraPreview);
cameraPreview.setOnCameraStatusListener(this);
}
@RequiresApi(api = Build.VERSION_CODES.N)
@Override
public void onCameraStopped(byte[] data) throws JSONException {
Log.e("TAG", "===onCameraStopped===");
File mSaveFile = getOutputMediaFile();
saveToFile(data, mSaveFile);
}
public File getOutputMediaFile() {
File mediaStorageDir =
new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), "CustomCameraPics");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.e(TAG, "failed to create directory");
return null;
}
}
long time = System.currentTimeMillis();
File file = new File(mediaStorageDir.getPath() + File.separator + "myPic_" + time + ".png");
path = file.getPath();
Log.e("imagePath", path);
return file;
}
@RequiresApi(api = Build.VERSION_CODES.N)
public void saveToFile(byte[] bytes, File file) throws JSONException {
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
try {
FileOutputStream out = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
out.flush();
out.close();
Log.e("Image", "Saved");
Toast.makeText(CameraActivity.this, "Image Saved", Toast.LENGTH_SHORT).show();
} catch (Exception e) {
e.printStackTrace();
}
}
}
//activity_camera.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.sukshi.mycamera.CameraActivity">
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="match_parent" >
<com.shank.mycamera.CameraPreview
android:id="#+id/cameraPreview"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<Button
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:text="Take pic"
android:id="#+id/takePhoto"/>
</RelativeLayout>
</RelativeLayout>
//CameraPreview.java
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraPreview";
private OnCameraStatusListener listener;
private Camera camera;
private SurfaceHolder holder;
private Camera.PictureCallback pictureCallback = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
try {
camera.stopPreview();
} catch (Exception e) {
}
if (null != listener) {
try {
listener.onCameraStopped(data);
} catch (JSONException e) {
e.printStackTrace();
}
}
}
};
public void takePicture() {
Log.e(TAG, "===takePicture()===");
if (camera != null) {
try {
camera.takePicture(null, null, pictureCallback);
} catch (Exception e) {
e.printStackTrace();
}
}
}
public CameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
holder = getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public interface OnCameraStatusListener {
void onCameraStopped(byte[] data) throws JSONException;
}
public void setOnCameraStatusListener(OnCameraStatusListener listener) {
this.listener = listener;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.e(TAG, "==surfaceCreated==");
camera = getCameraInstance();
try {
camera.setPreviewDisplay(holder);
} catch (Exception e) {
Log.e(TAG, "Error setting camera preview", e);
camera.release();
camera = null;
}
//update()
if (camera != null) {
camera.startPreview();
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
}
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.e(TAG, "==surfaceChanged==");
try {
camera.stopPreview();
} catch (Exception e) {
}
try {
camera.setPreviewDisplay(holder);
camera.startPreview();
} catch (Exception e) {
Log.e(TAG, "Error starting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.e(TAG, "==surfaceDestroyed==");
camera.release();
camera = null;
}
public static Camera getCameraInstance() {
Camera camera = null;
int cameraId = 0;
boolean frontCameraFound = false;
try {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
frontCameraFound = true;
}
}
if (frontCameraFound) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
Camera.Parameters parameters = camera.getParameters();
camera.setDisplayOrientation(90);
parameters.setRotation(270);
camera.setParameters(parameters);
} catch (Exception e) {
Log.e(TAG, "getCamera failed");
}
return camera;
}
}
It saves the image when you click the 'TAKE PIC' button.
My requirement: the whole image that the user sees on screen in the preview should be saved.
The problem I'm facing is that on some phones the saved image/bitmap doesn't contain everything that is shown to the user on screen.
As @CommonsWare has explained, there is no requirement that a captured picture be the same as the preview picture. You can skip camera.takePicture() altogether and, in your cameraPreview.takePicture(), save the latest preview frame as a bitmap.
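A rough sketch of that approach (the field and helper names here are made up for illustration, and it assumes the default NV21 preview format):
private byte[] lastFrame;          // updated on every preview frame
private Camera.Size previewSize;   // taken from camera.getParameters().getPreviewSize()

// register once, e.g. right after camera.startPreview()
camera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        lastFrame = data;
    }
});

// called from takePicture(): convert the stored NV21 frame to a Bitmap via a JPEG
private Bitmap lastFrameAsBitmap() {
    YuvImage yuv = new YuvImage(lastFrame, ImageFormat.NV21,
            previewSize.width, previewSize.height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 90, out);
    byte[] jpeg = out.toByteArray();
    return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
}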
The advantage of this approach is that you are guaranteed that the same image the user sees in the preview is saved, and it happens with no delay.
The drawback is that the resolution and quality of the image returned from camera.takePicture() may be noticeably better.
If you are not satisfied with the preview frame quality, you can use the same size for setPreviewSize() and setPictureSize(), provided that size appears in both getSupportedPreviewSizes() and getSupportedPictureSizes(); usually you can find a satisfactory pair.
If you are not satisfied with the preview frame resolution, at least choose the same aspect ratio for setPreviewSize() and for setPictureSize(). Some devices produce really weird results when the two don't match.
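For example, a sketch of pairing the two by aspect ratio (the 0.01 tolerance and the "first match wins" choice are assumptions; you may prefer the largest matching pair):
private void matchPreviewAndPictureSizes(Camera camera) {
    Camera.Parameters p = camera.getParameters();
    for (Camera.Size preview : p.getSupportedPreviewSizes()) {
        double previewRatio = (double) preview.width / preview.height;
        for (Camera.Size picture : p.getSupportedPictureSizes()) {
            double pictureRatio = (double) picture.width / picture.height;
            // same aspect ratio keeps the captured framing close to the preview
            if (Math.abs(previewRatio - pictureRatio) < 0.01) {
                p.setPreviewSize(preview.width, preview.height);
                p.setPictureSize(picture.width, picture.height);
                camera.setParameters(p);
                return;
            }
        }
    }
}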
But note that these steps cannot guarantee that the captured image will be exactly what was previewed, if only because takePicture() inevitably happens with some delay.
One more remark: if you target Android N and above, consider switching from the deprecated Camera API to the camera2 API.
I'm having a problem with my camera app.
My app has:
1) a CameraActivity.class and
2) a CameraPreview.class.
CameraPreview implements a SurfaceView and is used by CameraActivity for the actual preview. (The camera parameters are also set in CameraActivity.)
Now the problem: when the preview starts, it is stretched.
I tried a lot of things (so many that I cannot recall them all).
I need someone to tell me what to write and where. Thanks in advance.
Here is the CameraActivity (not all the code, but the important parts, I think):
private PictureCallback mPicture = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
// Replacing the button after a photo was taken.
flBtnContainer.setVisibility(View.GONE);
ibRetake.setVisibility(View.VISIBLE);
ibUse.setVisibility(View.VISIBLE);
// File name of the image that we just took.
fileName = "IMG_" + new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()).toString() + ".jpg";
// Creating the directory where to save the image. Sadly, in older
// versions of Android we cannot get the Media catalog name
File mkDir = new File(sdRoot, dir);
mkDir.mkdirs();
// Main file where to save the data that we receive from the camera
File pictureFile = new File(sdRoot, dir + fileName);
try {
FileOutputStream purge = new FileOutputStream(pictureFile);
purge.write(data);
purge.close();
} catch (FileNotFoundException e) {
Log.d("DG_DEBUG", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("DG_DEBUG", "Error accessing file: " + e.getMessage());
}
// Adding Exif data for the orientation. For some strange reason the
// ExifInterface class takes a string instead of a file.
try {
exif = new ExifInterface("/sdcard/" + dir + fileName);
exif.setAttribute(ExifInterface.TAG_ORIENTATION, "" + orientation);
exif.saveAttributes();
} catch (IOException e) {
e.printStackTrace();
}
Intent intent = new Intent(CameraActivity.this, PicturePreview.class);
Bundle extras = new Bundle();
extras.putString("ImagePath", String.valueOf(pictureFile));
intent.putExtras(extras);
startActivity(intent);
//sendBroadcast lets us instantly update the SD card with our image
sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://"+Environment.getExternalStorageDirectory())));
}
};
private void createCamera() {
// Create an instance of Camera
mCamera = getCameraInstance();
// Setting the right parameters in the camera
Camera.Parameters params = mCamera.getParameters();
params.setRotation(90);
mCamera.setParameters(params);
// Create our Preview view and set it as the content of our activity.
mPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mPreview, 0);
}
@Override
protected void onResume() {
super.onResume();
// Test if there is a camera on the device and if the SD card is
// mounted.
if (!checkCameraHardware(this)) {
Intent i = new Intent(this, NoCamera.class);
startActivity(i);
finish();
} else if (!checkSDCard()) {
Intent i = new Intent(this, NoSDCard.class);
startActivity(i);
finish();
}
// Creating the camera
createCamera();
// Register this class as a listener for the accelerometer sensor
////sensorManager.registerListener(this, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL);
}
@Override
protected void onPause() {
super.onPause();
// release the camera immediately on pause event
releaseCamera();
// removing the inserted view - so when we come back to the app we
// won't have the views on top of each other.
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.removeViewAt(0);
}
And here is the CameraPreview.class
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
boolean isPreviewRunning = true;
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mHolder.setFixedSize(100, 100);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the
// preview.
try {
mCamera.setPreviewDisplay(holder);
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
} catch (IOException e) {
Log.d("DG_DEBUG", "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceChanged(SurfaceHolder holder,
int format, int width, int height) {
if (isPreviewRunning){
return;
}
isPreviewRunning = true;
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// make any resize, rotate or reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d("DG_DEBUG", "Error starting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
}
Can someone tell me what I am missing? If possible I can chat on Facebook or something for a faster resolution of my problem.
Update: I tried @LikeWhiteOnRice's solution.
Here is my original code: (screenshot)
Here is with @LikeWhiteOnRice's code: (screenshot)
Any thoughts?
I added the code below to my camera preview class and it works for most devices. Just so you are aware, the camera library in Android is horrible and a huge pain to work with.
Put this function in your CameraPreview class:
private Camera.Size getOptimalSize(List<Camera.Size> sizes, int h, int w) {
final double ASPECT_TOLERANCE = 0.05;
double targetRatio = (double) w/h;
if (sizes == null) {
return null;
}
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Camera.Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
In your surfaceCreated function, add this before you start your preview:
Camera.Parameters cameraParameters = mCamera.getParameters();
List<Camera.Size> previewSizes = cameraParameters.getSupportedPreviewSizes();
Camera.Size optimalPreviewSize = getOptimalSize(previewSizes, getResources().getDisplayMetrics().widthPixels, getResources().getDisplayMetrics().heightPixels);
cameraParameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
mCamera.setParameters(cameraParameters);
Edit: Also, I'm not sure if you want
mHolder.setFixedSize(100, 100);
in your constructor.
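If the preview still looks stretched after picking the optimal size, the view itself also needs to keep that aspect ratio. A sketch (assuming you store the chosen preview size in an mPreviewSize field inside CameraPreview, and that the layout is portrait with setDisplayOrientation(90)):
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    int width = MeasureSpec.getSize(widthMeasureSpec);
    int height = MeasureSpec.getSize(heightMeasureSpec);
    if (mPreviewSize != null) {
        // camera sizes are reported in landscape, so swap width/height for a portrait view
        height = width * mPreviewSize.width / mPreviewSize.height;
    }
    setMeasuredDimension(width, height);
}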
I need to take a picture with the back camera in an Android service, but after reading the docs it seems you need a SurfaceView. Is it possible to take a picture without showing anything to the user?
Edit:
Will this work?
SurfaceTexture surfaceTexture = new SurfaceTexture(10);
Camera camera = Camera.open();
camera.getParameters().setPreviewSize(1, 1);
camera.setPreviewTexture(surfaceTexture);
camera.startPreview();
camera.takePicture(null, pictureCallback, null);
100% working: capture a picture from the front camera using a service.
public class MyService extends Service {
@Nullable
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
CapturePhoto();
}
private void CapturePhoto() {
Log.d("kkkk","Preparing to take photo");
Camera camera = null;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
int frontCamera = 1;
//int backCamera=0;
Camera.getCameraInfo(frontCamera, cameraInfo);
try {
camera = Camera.open(frontCamera);
} catch (RuntimeException e) {
Log.d("kkkk","Camera not available: " + 1);
camera = null;
//e.printStackTrace();
}
try {
if (null == camera) {
Log.d("kkkk","Could not get camera instance");
} else {
Log.d("kkkk","Got the camera, creating the dummy surface texture");
try {
camera.setPreviewTexture(new SurfaceTexture(0));
camera.startPreview();
} catch (Exception e) {
Log.d("kkkk","Could not set the surface preview texture");
e.printStackTrace();
}
camera.takePicture(null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFileDir = new File("/sdcard/CaptureByService");
if (!pictureFileDir.exists()) {
pictureFileDir.mkdirs();
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String date = dateFormat.format(new Date());
String photoFile = "ServiceClickedPic_" + "_" + date + ".jpg";
String filename = pictureFileDir.getPath() + File.separator + photoFile;
File mainPicture = new File(filename);
try {
FileOutputStream fos = new FileOutputStream(mainPicture);
fos.write(data);
fos.close();
Log.d("kkkk","image saved");
} catch (Exception error) {
Log.d("kkkk","Image could not be saved");
}
camera.release();
}
});
}
} catch (Exception e) {
camera.release();
}
}
}
You can set the width and height of the SurfaceView to 1dp and its top margin to -10dp, so it isn't visible on screen but still functions as normal; that way you can take a picture without displaying the SurfaceView to the user:
<SurfaceView
android:layout_width="1dp"
android:layout_height="1dp"
android:layout_marginTop="-10dp"
...
/>
In that case you can just use a SurfaceTexture:
SurfaceTexture surfaceTexture = new SurfaceTexture(10);
Camera camera = Camera.open();
camera.getParameters().setPreviewSize(1, 1);
camera.setPreviewTexture(surfaceTexture);
Yes, it is possible. Call your camera callback method to take the picture from the camera, don't specify a size for the preview, and just start the preview; it will work.
Omit this step:
param.setPreviewSize(122,133);
or use
param.setPreviewSize(1, 1);
I am trying to record video (with audio) using the front camera in one of my Activities with MediaRecorder. Part of the code is shown below. If I open the back camera, it works well. But if I open the front camera, there is no runtime error, yet I can't open the output MP4.
part of CameraService.java
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.d("TAG", "======= service in onStartCommand");
if (Util.checkCameraHardware(this)) {
mCamera = Util.getCameraInstance();
if (mCamera != null) {
SurfaceView sv = new SurfaceView(this);
WindowManager wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
WindowManager.LayoutParams params = new WindowManager.LayoutParams(1, 1,
WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY,
WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH,
PixelFormat.TRANSLUCENT);
SurfaceHolder sh = sv.getHolder();
sv.setZOrderOnTop(true);
sh.setFormat(PixelFormat.TRANSPARENT);
sh.addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
Camera.Parameters params = mCamera.getParameters();
mCamera.setParameters(params);
Camera.Parameters p = mCamera.getParameters();
List<Camera.Size> listSize;
listSize = p.getSupportedPreviewSizes();
Camera.Size mPreviewSize = listSize.get(2);
Log.v("TAG", "preview width = " + mPreviewSize.width
+ " preview height = " + mPreviewSize.height);
p.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
listSize = p.getSupportedPictureSizes();
Camera.Size mPictureSize = listSize.get(2);
Log.v("TAG", "capture width = " + mPictureSize.width
+ " capture height = " + mPictureSize.height);
p.setPictureSize(mPictureSize.width, mPictureSize.height);
mCamera.setParameters(p);
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
mCamera.unlock();
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setCamera(mCamera);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setProfile(CamcorderProfile.get(1,CamcorderProfile.QUALITY_HIGH));
mMediaRecorder.setOutputFile(Util.getOutputMediaFile(Util.MEDIA_TYPE_VIDEO).getPath());
mMediaRecorder.setPreviewDisplay(holder.getSurface());
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.d("TAG", "====== IllegalStateException preparing MediaRecorder: " + e.getMessage());
} catch (IOException e) {
Log.d("TAG", "====== IOException preparing MediaRecorder: " + e.getMessage());
}
mMediaRecorder.start();
Log.d("TAG", "========= recording start");
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
mMediaRecorder.stop();
mMediaRecorder.reset();
mMediaRecorder.release();
mMediaRecorder=null;
mCamera.stopPreview();
mCamera.release();
Log.d("TAG", "========== recording finished.");
}
}, 10000);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
});
wm.addView(sv, params);
} else {
Log.d("TAG", "==== get Camera from service failed");
}
} else {
Log.d("TAG", "==== There is no camera hardware on device.");
}
return super.onStartCommand(intent, flags, startId);
}
part of Util.java
public static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open(1);
} catch (Exception e) {
Log.d("TAG", "Open camera failed: " + e);
}
return c;
}
Update:
I changed the two lines shown below, switching to the back camera, and it now records video fine.
CameraService.java: mMediaRecorder.setProfile(CamcorderProfile.get(1,CamcorderProfile.QUALITY_HIGH)); => mMediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
Util.java: c = Camera.open(1); => c = Camera.open();
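If I still want the front camera, I assume the profile passed to setProfile() has to be one that the front camera actually supports. An untested sketch using CamcorderProfile.hasProfile() (the fallback order is just a guess):
private CamcorderProfile pickProfile(int cameraId) {
    // try from highest to lowest quality and take the first one this camera supports
    int[] qualities = {
            CamcorderProfile.QUALITY_HIGH,
            CamcorderProfile.QUALITY_720P,
            CamcorderProfile.QUALITY_480P,
            CamcorderProfile.QUALITY_LOW
    };
    for (int quality : qualities) {
        if (CamcorderProfile.hasProfile(cameraId, quality)) {
            return CamcorderProfile.get(cameraId, quality);
        }
    }
    return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_LOW);
}
// usage, with 1 being the front camera id passed to Camera.open(1):
// mMediaRecorder.setProfile(pickProfile(1));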
I once got the same error; in my case I had set the video size wrong.
protected void startRecording() {
try {
mrec = new MediaRecorder();
mCamera.unlock();
mrec.setCamera(mCamera);
//Set audio source
mrec.setAudioSource(MediaRecorder.AudioSource.MIC);
//set video source
mrec.setVideoSource(MediaRecorder.VideoSource.CAMERA);
//set output format
mrec.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
int width = 320;
int height = 240;
try {
//get the available sizes of the video
List<Size> tmpList = getSupportedVideoSizes();
final List<Size> sizeList = new Vector<Size>();
// compare the aspect ratio of the candidate sizes against the
// real ratio
Double aspectRatio = (Double.valueOf(getWindowManager()
.getDefaultDisplay().getHeight()) / getWindowManager()
.getDefaultDisplay().getWidth());
for (int i = tmpList.size() - 1; i > 0; i--) {
Double tmpRatio = Double.valueOf(tmpList.get(i).height)
/ tmpList.get(i).width;
if (EnableLog.LOG_TAG) {
Log.e("Width & height", tmpList.get(i).width + " x "
+ tmpList.get(i).height);
}
if (Math.abs(aspectRatio - tmpRatio) < .15) {
width = tmpList.get(i).width;
height = tmpList.get(i).height;
sizeList.add(tmpList.get(i));
}
}
} catch (Exception e) {
e.printStackTrace();
}
// set the size of video.
// If the size is not applicable, the media recorder throws the stop
// -19 error
mrec.setVideoSize(width, height);
// Set the video encoding bit rate; this changes between high, low and
// medium quality devices
mrec.setVideoEncodingBitRate(1700000);
//Set the video frame rate
mrec.setVideoFrameRate(30);
//set audio encoder format
mrec.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
//set video encoder format
mrec.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
//Show the display preview
mrec.setPreviewDisplay(surfaceHolder.getSurface());
//output file path
mrec.setOutputFile(output_path);
mrec.prepare();
mrec.start();
} catch (IllegalStateException e) {
Crashlytics.logException(e);
e.printStackTrace();
} catch (IOException e) {
Crashlytics.logException(e);
e.printStackTrace();
}
}
public List<Size> getSupportedVideoSizes() {
if (params.getSupportedVideoSizes() != null) {
return params.getSupportedVideoSizes();
} else {
// Video sizes may be null, which indicates that all the supported
// preview sizes are supported for video recording.
return params.getSupportedPreviewSizes();
}
}
This works for me. Try it; maybe it will help.
I've successfully added a watermark to the preview of the camera image that the user takes on my android app, however when it's sent to Instagram or Tumblr, the watermark isn't there.
I believe this is because it is sharing the image from local storage, and has nothing to do with the preview.
I think I need to modify the 'take picture' code for the camera, so that when it takes the photo it converts it to a bitmap, adds it to a canvas with the watermark and then saves it, but I'm not sure how to go about this.
I believe this is the source of the file being shared
final File fileToUpload = new File(StorageUtils.getStoragePath(ShareActivity.this), StorageUtils.DEFAULT_IMAGE);
Here is the take picture code for the camera.
protected void takePicture() {
if (cameraPreview == null) return;
Camera camera = cameraPreview.getCamera();
if (camera == null) return;
camera.takePicture(null, null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
if (data == null || data.length == 0) return;
File imageFile = new File(StorageUtils.getStoragePath(CameraActivity.this), StorageUtils.DEFAULT_IMAGE);
File parentDir = imageFile.getParentFile();
if (!parentDir.exists()) {
if (!parentDir.mkdirs()) {
Log.d(TAG, "Failed to create directory: " + parentDir.getAbsolutePath());
return;
}
}
try {
FileOutputStream fos = new FileOutputStream(imageFile);
fos.write(data);
fos.close();
} catch (IOException e) {
Log.d(TAG, "Failed to save file: " + imageFile.getAbsolutePath());
e.printStackTrace();
return;
}
//workaround for bug with facing camera introduced (intentionally?) in 4.0+
if (isCameraFacingFront && Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
Bitmap bitmap = BitmapFactory.decodeFile(imageFile.getAbsolutePath());
Matrix matrix = new Matrix();
//flip image vertically
matrix.setRotate(180);
matrix.postScale(-1, 1);
Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, false);
bitmap.recycle();
try {
rotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 80, new FileOutputStream(imageFile));
rotatedBitmap.recycle();
} catch (FileNotFoundException e) {
Log.d(TAG, "Failed to rotate and save bitmap: " + imageFile.getAbsolutePath());
e.printStackTrace();
return;
}
}
Intent intent = new Intent(CameraActivity.this, ShareActivity.class);
intent.putExtra(ShareActivity.PARAM_IMAGE_FILE, imageFile.getAbsolutePath());
if (business != null)
intent.putExtra(ShareActivity.PARAM_BUSINESS, business);
startActivity(intent);
}
});
}
Or I may be way off base. Any help or pointing in the right direction is much appreciated! Thank you!
Adding to my comment of, "You're on the right track. After you've gotten the picture, decode it, create a new Canvas for it, draw the watermark on the canvas, and save that image. You're pretty much just going to repeat the code for flipping the image, just drawing on the canvas right before saving the new image."...
I got bored and did it for you:
protected void takePicture() {
if (cameraPreview == null) return;
Camera camera = cameraPreview.getCamera();
if (camera == null) return;
camera.takePicture(null, null, null, new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
File imageFile = new File(StorageUtils.getStoragePath(CameraActivity.this), StorageUtils.DEFAULT_IMAGE);
File parentDir = imageFile.getParentFile();
if(!createImageFromCamera(data, imageFile, parentDir)) return;
//workaround for bug with facing camera introduced (intentionally?) in 4.0+
boolean requiresImageFlip = isCameraFacingFront && Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH;
Bitmap adjustedBitmap = getBitmap(imageFile, requiresImageFlip);
if(!drawWatermark(adjustedBitmap)) return;
if(!saveImage(imageFile, adjustedBitmap)) return;
Intent intent = new Intent(CameraActivity.this, ShareActivity.class);
intent.putExtra(ShareActivity.PARAM_IMAGE_FILE, imageFile.getAbsolutePath());
if(business != null) intent.putExtra(ShareActivity.PARAM_BUSINESS, business);
startActivity(intent);
}
});
}
private Bitmap getBitmap(File imageFile, boolean flipVertically){
Bitmap bitmap = BitmapFactory.decodeFile(imageFile.getAbsolutePath());
Matrix matrix = new Matrix();
if(flipVertically){
matrix.setRotate(180);
matrix.postScale(-1, 1);
}
// createBitmap() returns an immutable bitmap; copy it to a mutable one so drawWatermark() can draw on it with a Canvas
Bitmap adjustedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, false).copy(Bitmap.Config.ARGB_8888, true);
bitmap.recycle();
return adjustedBitmap;
}
private boolean saveImage(File imageFile, Bitmap bitmap){
try {
bitmap.compress(Bitmap.CompressFormat.JPEG, 80, new FileOutputStream(imageFile));
bitmap.recycle();
return true;
}
catch (FileNotFoundException e) {
Log.d(TAG, "Failed to rotate and save bitmap: " + imageFile.getAbsolutePath());
e.printStackTrace();
return false;
}
}
private boolean drawWatermark(Bitmap bitmap){
try{
Canvas canvas = new Canvas(bitmap);
canvas.drawBitmap(watermarkBitmap, 0, 0, null); // However you're positioning the watermark on the canvas
return true;
}
catch(Exception e){
e.printStackTrace();
return false;
}
}
private boolean createImageFromCamera(byte[] data, File imageFile, File parentDir){
if (data == null || data.length == 0) return false;
if (!parentDir.exists()) {
if (!parentDir.mkdirs()) {
Log.d(TAG, "Failed to create directory: " + parentDir.getAbsolutePath());
return false;
}
}
try {
FileOutputStream fos = new FileOutputStream(imageFile);
fos.write(data);
fos.close();
}
catch (IOException e) {
Log.d(TAG, "Failed to save file: " + imageFile.getAbsolutePath());
e.printStackTrace();
return false;
}
return true;
}
Replace your entire takePicture() method with that and it should do everything you're looking for.
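As for that placeholder drawBitmap call, here is one way you could fill in drawWatermark() (a sketch, assuming watermarkBitmap is a field you load elsewhere, e.g. with BitmapFactory.decodeResource()):
private boolean drawWatermark(Bitmap bitmap){
    try {
        Canvas canvas = new Canvas(bitmap); // bitmap must be mutable
        Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.FILTER_BITMAP_FLAG);
        paint.setAlpha(180); // slightly transparent watermark
        // bottom-right corner with a small margin
        float left = bitmap.getWidth() - watermarkBitmap.getWidth() - 16;
        float top = bitmap.getHeight() - watermarkBitmap.getHeight() - 16;
        canvas.drawBitmap(watermarkBitmap, left, top, paint);
        return true;
    }
    catch(Exception e){
        e.printStackTrace();
        return false;
    }
}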