LBP Cascade (OpenCV) won't load on android studio emulator - java

I've been trying to get some object tracking under my belt, and I managed to get a nice LBP cascade tracker running in C++, using OpenCV 3.1.
I wanted to try and get this robot tracker running on a phone, so I'm trying to transition it over to AndroidStudio. Unfortunately, everything except the actual cascade loading is working. I can get the camera to pull up in the app, I can have it show off the greyscale image instead of an rgb image, etc. It's just that cascade won't load, so the whole thing won't work.
Specs: Android Studio 1.5.1 emulating a API 19 phone (using the x86 google apis), using Opencv 3.1.0.
The CameraActivity code in question is here -
package <package name retracted for reasons>;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.Toast;
import java.util.Vector;
import <package name retracted here for reasons>.R;
public class CameraActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
#Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public CameraActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
#Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.camera_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
#Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
#Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
CascadeClassifier robot_cascade;
public void onCameraViewStarted(int width, int height) {
Log.d(TAG, "Trying to get robot cascade");
robot_cascade = new CascadeClassifier(Environment.getExternalStorageDirectory().getAbsolutePath() + "/cascade.xml");
String robot_cascade_name = Environment.getExternalStorageDirectory().getAbsolutePath() + "/cascade.xml";
Log.d(TAG, "location is "+robot_cascade_name);
if(robot_cascade.empty()){
Log.d(TAG, "--(!)Error loading robot cascade");
}
Log.d(TAG, "Made it through loading cascade!");
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat frameGrey = new Mat();
Mat endFrame = new Mat();
endFrame = inputFrame.rgba();
MatOfRect robots = new MatOfRect();
Imgproc.cvtColor(inputFrame.rgba(), frameGrey, Imgproc.COLOR_BGRA2GRAY);
Imgproc.equalizeHist(frameGrey, frameGrey);
/*robot_cascade.detectMultiScale(frameGrey, robots, 1.2, 120, 0, new Size(200, 200), new Size(300, 300));
Log.d(TAG, "Found %x robots" + robots.toArray().length);
for (Rect rect : robots.toArray()) {
Imgproc.rectangle(endFrame, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
Log.d(TAG, "Robot at point ( %x , %x )"+(rect.x+rect.width/2)+(rect.y+rect.height/2));
} */
return endFrame;
//return inputFrame.rgba();
}
}
For right now I have the cascade.detectMultiScale commented out while it's not working. Running it yields:
01-26 21:07:42.085 2296-2296/? D/OCVSample::Activity: Trying to get robot cascade
01-26 21:07:42.085 2296-2296/? D/OCVSample::Activity: location is /storage/sdcard/cascade.xml
01-26 21:07:42.085 2296-2296/? D/OCVSample::Activity: --(!)Error loading robot cascade
01-26 21:07:42.085 2296-2296/? D/OCVSample::Activity: Made it through loading cascade!
And then it goes down to the onCameraFrame method happily, and currently just outputs what it's getting (I've been able to play with it to get grayscale out and so on)
The problem appears to be in the onCameraViewStarted method -
// Invoked by CameraBridgeViewBase once the preview surface is ready.
// This is the problematic version: the classifier always comes up empty().
public void onCameraViewStarted(int width, int height) {
Log.d(TAG, "Trying to get robot cascade");
// Constructor-based load from external storage; fails silently when the
// file cannot be read, leaving the classifier empty.
robot_cascade = new CascadeClassifier(Environment.getExternalStorageDirectory().getAbsolutePath() + "/cascade.xml");
String robot_cascade_name = Environment.getExternalStorageDirectory().getAbsolutePath() + "/cascade.xml";
Log.d(TAG, "location is "+robot_cascade_name);
// empty() is true when no cascade data was loaded.
if(robot_cascade.empty()){
Log.d(TAG, "--(!)Error loading robot cascade");
}
Log.d(TAG, "Made it through loading cascade!");
}
Robot cascade always comes up empty.
I am sure that the cascade.xml is actually on the emulated phone - if I check through adb, it says it's chilling right there, and the android device monitor also shows that it's there.
The only thing I can think of is that ADM says that the permissions are -rwxrwx---, but I've got the WRITE_EXTERNAL_STORAGE and READ_EXTERNAL_STORAGE permissions in the manifest file, so I would think that'd be fine. (I could be completely wrong though, please correct me if I am).
Just in case it was important, I did try to chmod the cascade.xml to be read/writable by any user (not just owner and group), but it kept giving me "Bad Mode" no matter what I tried. The sd is mounted as read/writeable (because I was able to push the file onto the sd card in the first place), and I was in su, so I have no idea why it won't let me do that.
Edit: chmod is having really strange behavior - it basically won't do anything and fail quietly, or it'll say that it's a read-only file system... even though I can make files and directories and delete them no problem.
Edit x2: Moved file to /data/local, and it still doesn't work, but chmod worked on it so now I have all permissions. Still trying to figure out why cascade won't load it though.
So yeah, can't load a cascade that is definitely on the emulated sd card.

So, got it to work on the emulator (finally). Apparently the SD card on the emulator just doesn't allow chmod to work at all, so I moved the file to data/local. Then I changed everything to read from ("./data/local/cascade.xml"), and added robot_cascade.load("./data/local/cascade.xml"); after the creation of the cascade classifier. Then I uncommented the stuff that was actually using the cascade, and it works beautifully.
So, the new onCameraViewStarted method -
// Working version: the cascade was moved to /data/local (where chmod works
// on the emulator) and is loaded with an explicit load() call.
public void onCameraViewStarted(int width, int height) {
Log.d(TAG, "Prog: Trying to get robot cascade");
// Sanity check that the file is actually visible to the process.
File file = new File("./data/local/cascade.xml");
boolean fileExists = file.exists();
String fileDoesExist = String.valueOf(fileExists);
Log.d(TAG, "Prog: Does the cascade file exist? "+fileDoesExist);
robot_cascade = new CascadeClassifier("./data/local/cascade.xml");
// Explicit load() after construction -- this is what made loading succeed.
robot_cascade.load("./data/local/cascade.xml");
String robot_cascade_name = "./data/local/cascade.xml";
Log.d(TAG, "Prog: location is "+robot_cascade_name);
if(robot_cascade.empty()){
Log.d(TAG, "Prog: --(!)Error loading robot cascade");
} else {
Log.d(TAG, "Prog: --Holy smite the cascade is actually there praise the sun");
}
Log.d(TAG, "Prog: Made it through loading cascade!");
}
Of course, now I'm trying to get it to work on the phone instead of emulated and that's not working, but that's a matter for a different question.

Related

java.lang.IllegalAccessException: Tried to access visual service WindowManager from a non-visual Context

I've been struggling to implement a camera function into my app in a way that doesn't generate the below error:
E/ContextImpl: Tried to access visual service WindowManager from a
non-visual Context:com.camtest.App#385f002 Visual services,
such as WindowManager, WallpaperService or LayoutInflater should be
accessed from Activity or other visual Context. Use an Activity or a
Context created with Context#createWindowContext(int, Bundle), which
are adjusted to the configuration and visual bounds of an area on
screen.
java.lang.IllegalAccessException: Tried to access visual service
WindowManager from a non-visual Context:com.camtest.App#385f002
That error is triggered by this line:
final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(this);
I looked at implementing createWindowContext as the error suggests, but some of the target devices are older and not eligible for upgrade to Android 11, thus createWindowContext is not an option.
The first time around, I followed one of the CodeLabs for implementing CameraX. The camera behaved as expected, but triggered the exception. So I found a different example of implementing CameraX, but I get the same IllegalAccessException exception.
Any suggestions?
package com.camtest;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.view.View;
import android.widget.ImageView;
import android.widget.Toast;
import com.google.common.util.concurrent.ListenableFuture;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
public class CamTest extends AppCompatActivity {
private Executor executor = Executors.newSingleThreadExecutor();
private int REQUEST_CODE_PERMISSIONS = 9001;
private final String[] REQUIRED_PERMISSIONS = new String[]{"android.permission.CAMERA", "android.permission.WRITE_EXTERNAL_STORAGE"};
PreviewView mPreviewView;
ImageView captureImage;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camera_test);
mPreviewView = findViewById(R.id.camera);//was previewView
captureImage = findViewById(R.id.captureImg);
if(allPermissionsGranted()){
startCamera(); //start camera if permission has been granted by user
} else{
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
}
}
private void startCamera() {
final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(this); //This line triggers `E/ContextImpl: Tried to access visual service WindowManager from a non-visual Context`
cameraProviderFuture.addListener(new Runnable() {
#Override
public void run() {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
bindPreview(cameraProvider);
} catch (ExecutionException | InterruptedException e) {
// No errors need to be handled for this Future.
// This should never be reached.
}
}
}, ContextCompat.getMainExecutor(this));
}
void bindPreview(#NonNull ProcessCameraProvider cameraProvider) {
Preview preview = new Preview.Builder().build();
ImageCapture imageCapture = new ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
.build();
CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_BACK)
.build();
Camera camera = cameraProvider.bindToLifecycle(
((LifecycleOwner) this),
cameraSelector,
preview,
imageCapture);
preview.setSurfaceProvider(
mPreviewView.getSurfaceProvider());
captureImage.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
SimpleDateFormat mDateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.US);
File file = new File(getBatchDirectoryName(), mDateFormat.format(new Date())+ ".jpg");
ImageCapture.OutputFileOptions outputFileOptions = new ImageCapture.OutputFileOptions.Builder(file).build();
imageCapture.takePicture(outputFileOptions, executor, new ImageCapture.OnImageSavedCallback () {
#Override
public void onImageSaved(#NonNull ImageCapture.OutputFileResults outputFileResults) {
new Handler().post(new Runnable() {
#Override
public void run() {
Toast.makeText(CamTest.this, "Image Saved successfully", Toast.LENGTH_SHORT).show();
}
});
}
#Override
public void onError(#NonNull ImageCaptureException error) {
error.printStackTrace();
}
});
}
});
}
public String getBatchDirectoryName() {
String app_folder_path = "";
app_folder_path = Environment.getExternalStorageDirectory().toString() + "/images";
File dir = new File(app_folder_path);
if (!dir.exists() && !dir.mkdirs()) {
}
return app_folder_path;
}
private boolean allPermissionsGranted(){
for(String permission : REQUIRED_PERMISSIONS){
if(ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED){
return false;
}
}
return true;
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
if(requestCode == REQUEST_CODE_PERMISSIONS){
if(allPermissionsGranted()){
startCamera();
} else{
Toast.makeText(this, "Permissions not granted by the user.", Toast.LENGTH_SHORT).show();
this.finish();
}
}
}
}
And this activity is started by the below code within onCreate of MainActivity:
Button button_test = findViewById(R.id.button_test);
button_test.setOnClickListener(view -> {
Intent intent = new Intent(MainActivity.this, CamTest.class);
startActivityForResult(intent,0);
});
EDIT: full stack trace-
E/ContextImpl: Tried to access visual service WindowManager from a non-visual Context:com.camtest.App#dd90e6b Visual services, such as WindowManager, WallpaperService or LayoutInflater should be accessed from Activity or other visual Context. Use an Activity or a Context created with Context#createWindowContext(int, Bundle), which are adjusted to the configuration and visual bounds of an area on screen.
java.lang.IllegalAccessException: Tried to access visual service WindowManager from a non-visual Context:com.camtest.App#dd90e6b
at android.app.ContextImpl.getSystemService(ContextImpl.java:1914)
at android.content.ContextWrapper.getSystemService(ContextWrapper.java:803)
at androidx.camera.camera2.internal.Camera2UseCaseConfigFactory.<init>(Camera2UseCaseConfigFactory.java:50)
at androidx.camera.camera2.Camera2Config.lambda$defaultConfig$1(Camera2Config.java:60)
at androidx.camera.camera2.-$$Lambda$Camera2Config$g_hY10kZhqC56um0PalOLTzuFlU.newInstance(Unknown Source:0)
at androidx.camera.core.CameraX.lambda$initAndRetryRecursively$9$CameraX(CameraX.java:575)
at androidx.camera.core.-$$Lambda$CameraX$u-Xx2b6YXY5GXNXRh-mDiDnHdpQ.run(Unknown Source:10)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
at java.lang.Thread.run(Thread.java:923)
EDIT #2: In order to reproduce this error, the StrictMode for VmPolicy must be enabled. The below code is added to MainActivity.onCreate:
if( BuildConfig.BUILD_TYPE.contentEquals( "debug" ) ){
/*StrictMode.setThreadPolicy( new StrictMode.ThreadPolicy.Builder()
.detectAll()
.penaltyLog()
.build());*/
StrictMode.setVmPolicy( new StrictMode.VmPolicy.Builder()
.detectAll()//.detectNonSdkApiUsage()
.penaltyLog()
.build());
}
EDIT #3:
Updating from CameraX version 1.0.0-beta12 to 1.0.0-rc1 (current version as of today) had no effect
Pass the Context from the Activity rather than the Application.
The stack trace indicates you're passing an instance of com.camtest.App. Since you're just passing it from your Activity.this, I imagine the library you're using is calling Context.getApplicationContext() incorrectly. You'll need to chase this up with the library maintainers.

getting location via GPS on android with Processing

I'm working on a small app that needs access to GPS so that I can track my position.
However i pasted some code that i can use to check if it works. I will rewrite it later so that it can be adjusted but for now i just want to try it.
But when I execute the app, all parameters stay the way they were initialized.
I do have all permissions enabled and also GPS enabled. Even went outside to check if it works, but it will always stay the same.
after the app asks if i allow the app to use gps service everything is executed correctly. It returns positive for location tracking.
Here is the code: (it can also be found under here: https://github.com/codeanticode/processing-android-tutorials/blob/master/location_permissions/ex1_gps/ex1_gps.pde)
/*****************************************************************************************
Android Processing GPS example
Query the phone's GPS and display the data on the screen
Rolf van Gelder - v 22/02/2011 - http://cage.nl :: http://cagewebdev.com :: info#cage.nl
Check the ACCESS_FINE_LOCATION permission in Sketch Permissions!
*****************************************************************************************/
// Import needed Android libs
import android.content.Context;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.provider.Settings;
import android.os.Bundle;
import android.Manifest;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
// Set up the variables for the LocationManager and LocationListener
LocationManager locationManager;
MyLocationListener locationListener;
// Variables to hold the current GPS data
float currentLatitude = 0;
float currentLongitude = 0;
// Horizontal accuracy of the last fix, in meters.
float currentAccuracy = 0;
// Name of the provider that produced the last fix ("" when none/disabled).
String currentProvider = "";
// Set true once location updates have been successfully requested.
boolean hasLocation = false;
// Processing entry point: configure the display and request the runtime
// location permission. Processing invokes initLocation(boolean) with the
// grant result (callback name is passed as a string).
void setup () {
fullScreen();
orientation(PORTRAIT);
textFont(createFont("SansSerif", 26 * displayDensity));
textAlign(CENTER, CENTER);
requestPermission("android.permission.ACCESS_FINE_LOCATION", "initLocation");
}
// Render loop: show the latest fix, or a notice when permission is missing.
void draw() {
  background(0);
  if (!hasPermission("android.permission.ACCESS_FINE_LOCATION")) {
    text("No permissions to access location", 0, 0, width, height);
  } else {
    String status = "Latitude: " + currentLatitude + "\n" +
                    "Longitude: " + currentLongitude + "\n" +
                    "Accuracy: " + currentAccuracy + "\n" +
                    "Provider: " + currentProvider;
    text(status, 0, 0, width, height);
  }
}
// Permission callback: when granted, register a LocationListener with the
// system LocationManager so the globals above are kept up to date.
void initLocation(boolean granted) {
  if (granted) {
    Context context = getContext();
    locationListener = new MyLocationListener();
    locationManager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
    // Fix: request true GPS fixes (GPS_PROVIDER, not NETWORK_PROVIDER) with a
    // minimum interval of 1000 ms between updates. With the network provider
    // and a 0 ms interval the listener never fired, so every field kept its
    // initial value.
    locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 1000, 0, locationListener);
    hasLocation = true;
  } else {
    hasLocation = false;
  }
}
// Class for capturing the GPS data
class MyLocationListener implements LocationListener {
public void onLocationChanged(Location location) {
currentLatitude = (float)location.getLatitude();
currentLongitude = (float)location.getLongitude();
currentAccuracy = (float)location.getAccuracy();
currentProvider = location.getProvider();
}
public void onProviderDisabled (String provider) {
currentProvider = "";
}
public void onProviderEnabled (String provider) {
currentProvider = provider;
}
public void onStatusChanged (String provider, int status, Bundle extras) {
}
}
The solution:
change the "0, 0" in locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 0, 0, locationListener); to a "1000, 0" depending on how often you want to get your current position in this case: 1000 ms. Im not gonna talk about the second 0.
also instead of using NETWORK_PROVIDER use GPS_PROVIDER to get your true GPS pos instead of networked pos.

BLE connection issues

I am working on an app that can connect to an adafruit flora BLE device to receive information from it. I want the app to display the list of found devices and when an item in the list view is clicked the connection is made and data can be received. Ultimately I want to take said data over to another activity to graph in realtime (if possible). There are a few things going on that i dont understand and I hope someone can shed light on.
When it is scanning for devices the list view shows them but there are multiples of each.
For some reason the onPause and onResume make the list view glitchy (displays devices and then removes them)
How do I know when there is a connection?
When I have getRemoteDevice in my code I get a runtime error ( Attempt to invoke virtual method 'android.bluetooth.BluetoothDevice android.bluetooth.BluetoothAdapter.getRemoteDevice(java.lang.String)' on a null object reference)
When I try and use filters and settings in the startScan method I get nothing in my list, I have also tried null filters w/ settings and still nothing.
import android.annotation.SuppressLint;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattDescriptor;
import android.bluetooth.BluetoothProfile;
import android.bluetooth.le.BluetoothLeScanner;
import android.bluetooth.le.ScanCallback;
import android.content.Intent;
import android.os.Build;
import android.os.ParcelUuid;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import no.nordicsemi.android.support.v18.scanner.BluetoothLeScannerCompat;
import no.nordicsemi.android.support.v18.scanner.ScanFilter;
import no.nordicsemi.android.support.v18.scanner.ScanResult;
import no.nordicsemi.android.support.v18.scanner.ScanSettings;
public class BluetoothDiscovery extends AppCompatActivity {
private String TAG = "Bluetooth Device";
private int REQUEST_ENABLE_BT = 5;
private BluetoothAdapter mBluetoothAdapter;
private BluetoothLeScannerCompat scanner;
private ScanSettings settings;
private UUID baseUUID = UUID.fromString("6e400001-b5a3-f393-e0a9-e50e24dcca9e"); // service UUID
private UUID txUUID = UUID.fromString("6e400002-b5a3-f393-e0a9-e50e24dcca9e"); // TX UUID characteristic
private UUID rxUUID = UUID.fromString("6e400003-b5a3-f393-e0a9-e50e24dcca9e"); // RX UUID characteristic
private ScanFilter scanFilter;
private BluetoothDevice device, mdevice;
private BluetoothGatt mGatt;
private boolean mScanning = false;
private ArrayList<deviceShowFormat> foundDevices = new ArrayList<>();
formattingAdapter BTadapter;
Button scanButton;
TextView fancyWords;
ListView deviceList;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_bluetooth_discovery);
mBluetoothAdapter.getDefaultAdapter();
scanButton = findViewById(R.id.scanButt);
scanButton.setText(getString(R.string.notScanning));
fancyWords = findViewById(R.id.discoverText);
fancyWords.setText(getString(R.string.nonScanTitle));
deviceList = findViewById(R.id.deviceList);
BTadapter = new formattingAdapter(BluetoothDiscovery.this, foundDevices);
deviceList.setAdapter(BTadapter);
scanner = BluetoothLeScannerCompat.getScanner();
settings = new ScanSettings.Builder().setScanMode(ScanSettings.SCAN_MODE_BALANCED).setReportDelay(500).build();
scanFilter = new ScanFilter.Builder().setServiceUuid(new ParcelUuid(baseUUID)).build();
//scanner.startScan(Arrays.asList(scanFilter), settings, mScanCallback);
deviceList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
#SuppressLint("LongLogTag")
#Override
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
deviceShowFormat mBTDevice = foundDevices.get(i);
BluetoothDevice Device = mBTDevice.get_device();
String deviceName = mBTDevice.get_device_name();
String deviceAddress = mBTDevice.get_device_address();
Log.d(TAG, "Selected device: " + Device.toString());
Log.d(TAG, "Selected device name: " + deviceName);
Log.d(TAG, "Selected device address: " + deviceAddress);
//BluetoothDevice deviceConnect = mBluetoothAdapter.getRemoteDevice(deviceAddress);
//deviceConnect.createBond();
mGatt = Device.connectGatt(BluetoothDiscovery.this, false, mGattCallback);
Toast.makeText(BluetoothDiscovery.this, "Selected device: " + deviceName, Toast.LENGTH_SHORT).show();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
Device.createBond();
}
Log.d(TAG, "" + Device.getBondState());
if(Device.getBondState() == BluetoothDevice.BOND_BONDED){
Toast.makeText(BluetoothDiscovery.this, "Bluetooth device connected successfully", Toast.LENGTH_SHORT).show();
}
mGatt.getServices();
mGatt.getConnectedDevices();
Log.d("THIS IS THE DEVICES UUID", String.valueOf(Device.getUuids()));
Log.d("DEVICE SERVICES", String.valueOf(mGatt.getServices()));
}
});
}
private final no.nordicsemi.android.support.v18.scanner.ScanCallback mScanCallback = new no.nordicsemi.android.support.v18.scanner.ScanCallback() {
#Override
public void onScanResult(int callbackType, ScanResult result) {
super.onScanResult(callbackType, result);
Log.i("onScanResult", "device detected");
device = result.getDevice();
String deviceName = device.getName();
String deviceAddress = device.getAddress();
Log.d(TAG, "Scanned device: " + device.toString());
Log.d(TAG, "Scanned device name: " + deviceName);
Log.d(TAG, "Scanned device address: " + deviceAddress);
foundDevices.add(new deviceShowFormat(device, deviceName, deviceAddress));
BTadapter.notifyDataSetChanged();
}
};
private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
#Override
public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
super.onConnectionStateChange(gatt, status, newState);
Log.i("onConnectionStateChange", "State Changed from: " + status + " to " + newState);
if (newState == BluetoothProfile.STATE_CONNECTED){
Toast.makeText(BluetoothDiscovery.this, "Attempting service discovery", Toast.LENGTH_SHORT).show();
Log.i("onConnectionStateChange", "Attempting service discovery: " + gatt.discoverServices());
gatt.discoverServices();
} else if (newState == BluetoothProfile.STATE_DISCONNECTED){
Toast.makeText(BluetoothDiscovery.this, "Connection has been terminated", Toast.LENGTH_SHORT).show();
}
}
#Override
public void onServicesDiscovered(BluetoothGatt gatt, int status){
super.onServicesDiscovered(gatt, status);
Log.i("onServicesDiscovered", "Hey, we found a service");
if (status != BluetoothGatt.GATT_SUCCESS){
// Handle error
Log.d("onServicesDiscovered" , "" + status);
return;
}
BluetoothGattCharacteristic characteristic = gatt.getService(baseUUID).getCharacteristic(rxUUID);
gatt.setCharacteristicNotification(characteristic, true);
BluetoothGattDescriptor descriptor = characteristic.getDescriptor(txUUID);
descriptor.setValue(BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE);
gatt.writeDescriptor(descriptor);
}
#Override
public void onCharacteristicRead(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int status){
Log.i("onCharacteristicRead", "Characteristic has been read");
readCounterCharacteristic(characteristic);
}
#Override
public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
if (mGatt.writeCharacteristic(characteristic)){
Log.d("Characteristic changed", "Possibly looking for a write");
}
if (mGatt.readCharacteristic(characteristic)){
readCounterCharacteristic(characteristic);
}
}
private void readCounterCharacteristic(BluetoothGattCharacteristic characteristic){
if (mGatt.readCharacteristic(characteristic)){
byte[] data = characteristic.getValue();
Log.d("READ DATA", data.toString());
}
// if (rxUUID.equals(characteristic.getUuid())){
// //byte[] data = characteristic.getValue();
// byte[] data = mGatt.readCharacteristic(characteristic);
// //int value = Ints.fromByteArray(data);
// Log.d("READ DATA", data.toString());
// }
}
};
public void toggleScan(View view){
mScanning = !mScanning;
if(mScanning){
scanner.startScan(mScanCallback); //Arrays.asList(scanFilter) null, settings,
scanButton.setText(getString(R.string.scanInProgress));
fancyWords.setText(getString(R.string.ScanTitle));
} else {
scanner.stopScan(mScanCallback);
scanButton.setText(getString(R.string.notScanning));
}
}
// #Override
// public void onPause(){
// super.onPause();
//
//// if(mScanning){
//// mScanning = !mScanning;
// scanner.stopScan(mScanCallback);
//// }
//
// //Empty Adapter
// //BTadapter.clear();
// //BTadapter.notifyDataSetChanged();
//
// //mdevice = device;
//
// }
//
// #Override
// public void onResume(){
// super.onResume();
//
// if (mBluetoothAdapter == null || !mBluetoothAdapter.isEnabled()) {
// Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
// startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);
// }
//
// //device = mdevice;
// }
}
The code is the second activity in my app; in the first, Bluetooth is initialized and what-not. The above code works, but I don't receive any data from the device and am not sure it's truly connected. From the logs shown in the code I get:
Logcat
Logcat2
Resources im using:
https://github.com/NordicSemiconductor/Android-Scanner-Compat-Library
https://learn.adafruit.com/introducing-the-adafruit-bluefruit-le-uart-friend/uart-service
http://nilhcem.com/android-things/bluetooth-low-energy
You missed to initialize mBluetoothAdapter
As of (at oncreate):
BluetoothManager manager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
mBluetoothAdapter = manager.getAdapter();
instead of:
mBluetoothAdapter.getDefaultAdapter();
After that your variable is initialized and ready to use, just check if you need RuntimePermissions for using bluetooth.
Much of the stuff in android BLE communication is asynchronous.
So when you call Device.connectGatt(), you need to wait for the onConnectionStateChange callback before doing the next thing. Your attempts to enumerate the services will fail (probably quietly) because your connection state hasn't gone from 0 to 2 at that point. In your log you can see that you do the getServices() call before it has called you back to say the connection state has gone to connected.
Making things trickier, in my experience you should only be doing your BluetoothDevice interaction from the main thread. (Edit: Only sending requests from the main thread is apparently not a requirement)
So each command you do (connecting, reading characteristic values, writing, disconnecting, enumerating services, etc) needs to look like:
Main Thread: Call the (connect|read|write|enumerate) function
Callback thread: Java calls your BluetoothGattCallback, and you should find a way (Handler?) to signal the main thread to do your next thing.
Main thread: process the result from BluetoothGattCallback and trigger the next thing you'll be doing.
Mild aside: This is a lot easier in objective-c and react-native-ble-plx, because they handle the threading for you. If you design a good way to get the results from the callback thread to the main thread from the beginning, you will save yourself a lot of pain.
Example code for getting stuff back to the UI thread:
Handler m_handler;
public void onResume() {
m_handler = new Handler(); // creates a Handler bound to the UI thread, which lets us fire messages back and forth between threads.
}
// ... other stuff ...
public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
super.onConnectionStateChange(gatt, status, newState);
Log.i("onConnectionStateChange", "State Changed from: " + status + " to " + newState);
if (newState == BluetoothProfile.STATE_CONNECTED){
// oh boy, we're connected. Let's try to discover services!
// m_handler.post() is a good way to make sure the thing you pass runs on the UI thread.
m_handler.post(new Runnable() {
#Override
public void run() {
gatt.discoverServices();
}
})
}
}
For anyone looking this up in the future: If you receive a message similar to D/BluetoothGatt: onConnectionUpdated() - Device=xx:xx:xx:xx:xx:xx interval=6 latency=0 timeout=500 status=0 you are connecting to the device but not "enough" to receive info. For me it turned out I had to click the device a second time to actually receive the data. I don't know why yet.
in your answer
For anyone looking this up in the future: If you receive a message
similar to D/BluetoothGatt: onConnectionUpdated() -
Device=xx:xx:xx:xx:xx:xx interval=6 latency=0 timeout=500 status=0 you
are connecting to the device but not "enough" to recieve info. For me
it turned out i had to click the device to connect to a second time to
actually recieve the data. I dont know why yet
Unfortunately, I ran into this problem too; you can see it in my question:
BLE onDescriptorWrite is not trigger when writeDescriptor receive true
Also, when you say you had to click the device a second time — do you mean in your app, or on the other BLE device?
Thanks a lot.

Android: How to store an image from camera and external storage into SQLite and also get it back again?

I have been trying to figure out how to store an image or the location of an image in a SQLite database. And then also get the image from the database back, as well.
Here I have an activity that uses the camera. I've made comments in the code to explain my steps.
I just need to see how to edit onActivityResult here in my code. I commented some of my SQLite database attempt. How can I put this external storage image from the camera into my database and then also how can I get it back out from the database and display as an image again?
I have the database code in a separate activity. I am just showing the code with the camera here and need general guidance on what I should do.
My activity:
package com.example.myapp.stepbystep;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
public class WalkPhoto extends AppCompatActivity {
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_walk_photo);
Button button= findViewById(R.id.button);
button.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
dispatchTakePictureIntent();
}
});
}
/*
* SWITCH TO CAMERA APP
* Define integer called request image capture.
* Create an intent: Tell OS planning to open the camera. Action of image capture moves from this app to camera app
* If not null (null = no camera app on device), then start the camera app
*/
static final int REQUEST_IMAGE_CAPTURE = 1;
private void dispatchTakePictureIntent() {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE);
}
}
/*
* GET IMAGE FROM CAMERA APP BACK TO MAIN APP
* onActivityResult gets called when there is a result for the intent.
* It checks whether the request image capture is true and then the result is okay
* If correct, then the image is stored in the intent data
* Stores data in bundle
* Get image as bitmap
* Use an image view to display it to user
*/
#Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
Bundle extras = data.getExtras();
Bitmap imageBitmap = (Bitmap) extras.get("data");
ImageView imageview = findViewById(R.id.imageView);
imageview.setImageBitmap(imageBitmap);
// DatabaseHandler mydb = new DatabaseHandler(getApplicationContext());
// Walk walk = new Walk();
// walk.set_photo("data");
//mydb.addWalk(walk);
//Toast.makeText(getApplicationContext(), "Walk Successfully Added.", Toast.LENGTH_SHORT).show();
/*
* STORE IMAGE TO EXTERNAL STORAGE
* External storage is used to save the image to the device outside of your main app
* Retrieve the directory
* Create a new file and call it a name
* Output stream
* Compress the image
*/
File storageLoc = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
File file = new File(storageLoc, "myImage.jpg");
try{
FileOutputStream fos = new FileOutputStream(file);
imageBitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.close();
scanFile(this, Uri.fromFile(file));
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
/*
* BROADCAST TO REFRESH THE MEDIA CENTER WITH THE NEW IMAGE
* scanFile sends the broadcast receiver to the media center.
* It sees a new file in the system, scans the file, and list the file in the directory to the user
* Have to create an intent, set the data, and send the broadcast
* (Media center is only refreshed once when the device is restarted.
* Overloads the OS if refreshed all of the time. So just check once here to see new file, scan, and list)
*/
private static void scanFile(Context context, Uri imageUri){
Intent scanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
scanIntent.setData(imageUri);
context.sendBroadcast(scanIntent);
}
}
It is quite likely that pictures taken with the camera will be large — too large for them to be efficiently stored, and, more importantly, with a high risk of crashes in SQLite when retrieving such images. This is due to a size limitation with a CursorWindow (1-2Mb).
If the images are up to around 100Kb then SQLite can be efficient.
As such the recommended method is to store and retrieve images that will be over 100Kb as a standard file and to then store the path in the database.
Assuming the larger images then DB wise you could :-
Create a subclass of the SQLiteOpenHelper e.g. SO49827073DBHelper.java
:-
public class SO49827073DBHelper extends SQLiteOpenHelper {
public static final String DBNAME ="walk.db";
public static final int DBVERSION = 1;
public static final String TB_WALK = "walk";
public static final String COL_WALK_ID = BaseColumns._ID; // equates to _id
public static final String COL_WALK_PHOTDESCRIPTION = "photo_dsecription";
public static final String COL_WALK_PATH = "storeage_path";
SQLiteDatabase mDB;
public SO49827073DBHelper(Context context) {
super(context, DBNAME, null, DBVERSION);
mDB = this.getWritableDatabase(); //<<< Force creation when first instantiated
}
#Override
public void onCreate(SQLiteDatabase db) {
//!!NOTE!! onCREATE ONLY RUNS ONCE WHEN DB IS CREATED
String crtsql = "CREATE TABLE IF NOT EXISTS " + TB_WALK + "(" +
COL_WALK_ID + " INTEGER PRIMARY KEY, " +
COL_WALK_PHOTDESCRIPTION + " TEXT," +
COL_WALK_PATH + " TEXT" +
")";
db.execSQL(crtsql);
//!!NOTE!! to recreate tables delete the App's data or uninstall
}
#Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
}
public long addWalk(String description, String photo_path) {
ContentValues cv = new ContentValues();
cv.put(COL_WALK_PHOTDESCRIPTION,description);
cv.put(COL_WALK_PATH,photo_path);
return this.getWritableDatabase().insert(TB_WALK,null,cv);
//!!Note!! will return 1 or greater (the id) if insert was ok
}
public Cursor getAll() {
return this.getWritableDatabase().query(
TB_WALK,
null,
null,
null,
null,
null,
null
);
}
}
Note that we are going to instantiate an instance, add a few rows using the addWalk method, then get all the rows into a Cursor via the getAll method, and finally traverse the Cursor, writing each row out to the log.
So in the invoking activity you could have something along the lines of
:-
// Example usage (e.g. in onCreate of the invoking activity):
// insert two rows, then dump every stored row to logcat.
SO49827073DBHelper mydb = new SO49827073DBHelper(this); //<<<< get instance of dbhelper
mydb.addWalk("I fell into a puddle!!","photos/puddlefalling/fall001.jpg"); //<<<<Add row
mydb.addWalk("Found a black cat in a coal cellar","photos/unusual/blackcatinacoalcellar.jpg");//<<<<Add row
Cursor csr = mydb.getAll(); //<<<< get all rows the loop through them
// moveToNext() returns false once the cursor is past the last row.
while (csr.moveToNext()) {
Log.d("WALKINFRO",
"Photo: " + csr.getString(csr.getColumnIndex(SO49827073DBHelper.COL_WALK_PHOTDESCRIPTION)) +
"\n\tStored: " + csr.getString(csr.getColumnIndex(SO49827073DBHelper.COL_WALK_PATH)) +
"\n\tID " + String.valueOf(csr.getLong(csr.getColumnIndex(SO49827073DBHelper.COL_WALK_ID)))
);
}
// Always close cursors when done to free the underlying CursorWindow.
csr.close();
So take a photo, save it to a file, get the file name and the description, and call the addWalk method passing both through.
retrieval would be based upon the getAll (perhaps add a getById method)
Sorry about the longish names they are for my convenience.

java.lang.IllegalStateException: failed to get surface

I am trying to create an app which enables the user to record his smartphones's screen.
This is my starting code:
import android.content.Context;
import android.content.Intent;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaRecorder;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.widget.Button;
import android.widget.Toast;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
public class MainActivity extends AppCompatActivity
{
private static final int CAST_PERMISSION_CODE = 22;
private DisplayMetrics mDisplayMetrics = new DisplayMetrics();
private MediaProjection mMediaProjection;
private VirtualDisplay mVirtualDisplay;
private MediaRecorder mMediaRecorder;
private MediaProjectionManager mProjectionManager;
private Button startButton;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startButton = (Button) findViewById( R.id.recordButton );
mMediaRecorder = new MediaRecorder();
mProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
getWindowManager().getDefaultDisplay().getMetrics(this.mDisplayMetrics);
prepareRecording();
startRecording();
}
private void startRecording() {
// If mMediaProjection is null that means we didn't get a context, lets ask the user
if (mMediaProjection == null) {
// This asks for user permissions to capture the screen
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), CAST_PERMISSION_CODE);
return;
}
mVirtualDisplay = getVirtualDisplay();
mMediaRecorder.start();
}
private void stopRecording() {
if (mMediaRecorder != null) {
mMediaRecorder.stop();
mMediaRecorder.reset();
}
if (mVirtualDisplay != null) {
mVirtualDisplay.release();
}
if (mMediaProjection != null) {
mMediaProjection.stop();
}
prepareRecording();
}
public String getCurSysDate() {
return new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date());
}
private void prepareRecording() {
try {
mMediaRecorder.prepare();
} catch (Exception e) {
e.printStackTrace();
return;
}
final String directory = Environment.getExternalStorageDirectory() + File.separator + "Recordings";
if (!Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) {
Toast.makeText(this, "Failed to get External Storage", Toast.LENGTH_SHORT).show();
return;
}
final File folder = new File(directory);
boolean success = true;
if (!folder.exists()) {
success = folder.mkdir();
}
String filePath;
if (success) {
String videoName = ("capture_" + getCurSysDate() + ".mp4");
filePath = directory + File.separator + videoName;
} else {
Toast.makeText(this, "Failed to create Recordings directory", Toast.LENGTH_SHORT).show();
return;
}
int width = mDisplayMetrics.widthPixels;
int height = mDisplayMetrics.heightPixels;
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(width, height);
mMediaRecorder.setOutputFile(filePath);
}
#Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode != CAST_PERMISSION_CODE) {
// Where did we get this request from ? -_-
//Log.w(TAG, "Unknown request code: " + requestCode);
return;
}
if (resultCode != RESULT_OK) {
Toast.makeText(this, "Screen Cast Permission Denied :(", Toast.LENGTH_SHORT).show();
return;
}
mMediaProjection = mProjectionManager.getMediaProjection(resultCode, data);
// TODO Register a callback that will listen onStop and release & prepare the recorder for next recording
// mMediaProjection.registerCallback(callback, null);
mVirtualDisplay = getVirtualDisplay();
mMediaRecorder.start();
}
private VirtualDisplay getVirtualDisplay()
{
int screenDensity = mDisplayMetrics.densityDpi;
int width = mDisplayMetrics.widthPixels;
int height = mDisplayMetrics.heightPixels;
return mMediaProjection.createVirtualDisplay(this.getClass().getSimpleName(), width, height, screenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mMediaRecorder.getSurface(), null /*Callbacks*/, null /*Handler*/);
}
}
After showing a message that informs the user about the screen capture function, my app crashes.
java.lang.RuntimeException: Failure delivering result ResultInfo{who=null, request=22, result=-1, data=Intent { (has extras) }} to activity {gr.awm.clrecorder/gr.awm.clrecorder.MainActivity}: java.lang.IllegalStateException: failed to get surface
at android.app.ActivityThread.deliverResults(ActivityThread.java:3974)
at android.app.ActivityThread.handleSendResult(ActivityThread.java:4017)
at android.app.ActivityThread.access$1400(ActivityThread.java:172)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1471)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:145)
at android.app.ActivityThread.main(ActivityThread.java:5832)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1399)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1194)
Caused by: java.lang.IllegalStateException: failed to get surface
at android.media.MediaRecorder.getSurface(Native Method)
at gr.awm.clrecorder.MainActivity.getVirtualDisplay(MainActivity.java:148)
at gr.awm.clrecorder.MainActivity.onActivityResult(MainActivity.java:135)
Is there a way to solve this issue? Any advice would be helpful and deeply appreciated.
Thanks in advance
Nevermind the comment btw.
I dug into the documentation and your code and got the following results.
This is the order you call the mMediaRecorder methods to get a surface.
mMediaRecorder.prepare();
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(width, height);
mMediaRecorder.setOutputFile(filePath);
This is what the documentation says
//Call this method before prepare().
setVideoEncodingBitRate(); //no exception thrown
//Must be called after setVideoSource(). Call this after setOutFormat() but before prepare().
setVideoSize(width, height); //IllegalStateException if it is called after prepare() or before setOutputFormat()
//Call this only before setOutputFormat().
setAudioSource(); //IllegalStateException if it is called after setOutputFormat()
setVideoSource(); //IllegalStateException if it is called after setOutputFormat()
//Call this after setOutputFormat() and before prepare().
setVideoEncoder(); //IllegalStateException if it is called before setOutputFormat() or after prepare()
setAudioEncoder(); //IllegalStateException if it is called before setOutputFormat() or after prepare().
//Call this after setAudioSource()/setVideoSource() but before prepare().
setOutputFormat(); //IllegalStateException if it is called after prepare() or before setAudioSource()/setVideoSource().
//Call this after setOutputFormat() but before prepare().
setOutputFile(); //IllegalStateException if it is called before setOutputFormat() or after prepare()
//Must be called after setVideoSource(). Call this after setOutFormat() but before prepare().
setVideoFrameRate(); //IllegalStateException if it is called after prepare() or before setOutputFormat().
//This method must be called after setting up the desired audio and video sources, encoders, file format, etc., but before start()
prepare() //IllegalStateException if it is called after start() or before setOutputFormat().
So in order to get the mMediaRecorder in a correct state you have to call the methods in this order:
setAudioSource()
setVideoSource()
setOutputFormat()
setAudioEncoder()
setVideoEncoder()
setVideoSize()
setVideoFrameRate()
setOutputFile()
setVideoEncodingBitRate()
prepare()
start()
I think I also got an undocumented error when I called the setEncoder Methods before the setSource Methods
Edit: I thought I got working code, but I still get IllegalStateExceptions although the code is in the order of the documentation.
Edit2: I got it working now. Things that might also not be working, plus additional error messages:
Permissions for external Storage and Microphone not set (add use-permissions in the Manifest)
Android MediaRecorder start failed in invalid state 4
IllegalStateException [start called in an invalid state: 1] on restarting Android MediaRecorder
I had to create a directory where the App could write to. I couldn't get the external Storage to work so I used
the data directory. But that is unrelated to the mMediaRecorder code
This code works:
private void prepareRecording() {
//Deal with FileDescriptor and Directory here
//Took audio out because emulator has no mic
//mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
//mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setVideoSize(width, height);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setOutputFile(filePath);
try {
mMediaRecorder.prepare();
} catch (Exception e) {
e.printStackTrace();
return;
}
//Field variable to hold surface object
//Deal with it as you see fit
surface = mMediaRecorder.getSurface();
Beware Although the above code works in creating the MediaRecorder correctly and writing to storage, it crashes the whole emulator when mMediaRecorder.stop() is called.
Maybe you set a wrong video size or a wrong video source. Make sure mediaRecorder.prepare() has executed successfully beforehand.
I also had this problem; after checking all of the above, I fixed the issue.
I just got the same problem.
The problem only occurs once (after installing the game FOR THE FIRST TIME and after giving the permissions)
so I cleared the application's data (like a thousandth time) to reproduce the error but it never happened again.
So what I did to figure this out is deleting the folder from the storage
in your case the folder's name is String directory = "Recordings"
And this time I got to reproduce the error.
And what I did to fix it is making sure to create the folder once the "WRITE_EXTERNAL_STORAGE" permission is accepted and before calling all the MediaRecorder configuration
switch (requestCode) {
case REQUEST_PERMISSIONS: {
if ((grantResults.length > 0) && (grantResults[0] +
grantResults[1]) == PackageManager.PERMISSION_GRANTED) {
//onToggleScreenShare(mToggleButton);
File folder = new File(Environment.getExternalStorageDirectory() +
File.separator + "textingstories");
boolean success = true;
if (!folder.exists()) {
success = folder.mkdirs();
if (success) {
// Do something on success
StartRecord();
} else {
// Do something else on failure
}
}
else {
StartRecord();
}
and for android 10 you might want to add this to the manifest to make sure that the folder is created
<manifest ... >
<application android:requestLegacyExternalStorage="true" ... >
...
</application>
</manifest>

Categories