I am trying to create a chess app. I receive move information over Bluetooth and need to pass it to a method located in the core class. I also need to start a function in AndroidLauncher when a button from the core class is clicked.
AndroidLauncher:
public class AndroidLauncher extends AndroidApplication {
public static final int CONNECTION_TIMEOUT=10000;
public static final int READ_TIMEOUT=15000;
BluetoothSocket mmSocket;
BluetoothDevice mmDevice = null;
final byte delimiter = 33;
int readBufferPosition = 0;
public void sendBtMsg(String msg2send){
//UUID uuid = UUID.fromString("00001101-0000-1000-8000-00805f9b34fb"); //Standard SerialPortService ID
UUID uuid = UUID.fromString("94f39d29-7d6d-437d-973b-fba39e49d4ee"); //custom service UUID (the commented-out line above is the standard SerialPortService ID)
try {
mmSocket = mmDevice.createRfcommSocketToServiceRecord(uuid);
if (!mmSocket.isConnected()){
mmSocket.connect();
}
String msg = msg2send;
//msg += "\n";
OutputStream mmOutputStream = mmSocket.getOutputStream();
mmOutputStream.write(msg.getBytes());
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@Override
protected void onCreate (Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
AndroidApplicationConfiguration config = new AndroidApplicationConfiguration();
MyGdxGame game = new MyGdxGame();
View gameView = initializeForView(game, config);
initialize(new MyGdxGame(), config);
BluetoothAdapter mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
final class workerThread implements Runnable {
private String btMsg;
public workerThread(String msg) {
btMsg = msg;
}
public void run()
{
sendBtMsg(btMsg);
while (true) {
while (!Thread.currentThread().isInterrupted()) {
int bytesAvailable;
boolean workDone = false;
try {
final InputStream mmInputStream;
mmInputStream = mmSocket.getInputStream();
bytesAvailable = mmInputStream.available();
if (bytesAvailable > 0) {
byte[] packetBytes = new byte[bytesAvailable];
byte[] readBuffer = new byte[1024];
mmInputStream.read(packetBytes);
for (int i = 0; i < bytesAvailable; i++) {
byte b = packetBytes[i];
if (b == delimiter) {
byte[] encodedBytes = new byte[readBufferPosition];
System.arraycopy(readBuffer, 0, encodedBytes, 0, encodedBytes.length);
final String data = new String(encodedBytes, "US-ASCII");
readBufferPosition = 0;
//The variable data now contains our full command
handler.post(new Runnable() {
public void run() {
char first = data.charAt(0);
if( first == 'm'){
new AsyncLogin().execute(data.substring(1),"-", "0");
} else {
new AsyncLogin().execute("-",data, "0");
}
//myLabel.setText(data);
}
});
workDone = true;
break;
} else {
readBuffer[readBufferPosition++] = b;
}
}
if (workDone) {
break;
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
};
if(!mBluetoothAdapter.isEnabled())
{
Intent enableBluetooth = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableBluetooth, 0);
}
Set<BluetoothDevice> pairedDevices = mBluetoothAdapter.getBondedDevices();
if(pairedDevices.size() > 0)
{
for(BluetoothDevice device : pairedDevices)
{
if(device.getName().equals("raspberrypi")) //Note, you will need to change this to match the name of your device
{
Log.e("Aquarium",device.getName());
mmDevice = device;
break;
}
}
}
}
MyGdxGame:
public class MyGdxGame extends ApplicationAdapter {
public static int V_WIDTH = 720;//720
public static int V_HEIGHT = 1280;//1280
private String slogin = "Prisijungimas", sreg = "Registracija", sai = "Kompiuteris", shuman = "Ieškoti oponento", sboard;
SpriteBatch batch;
Texture bdt,blt,kdt,klt,ndt,nlt,pdt,plt,qdt,qlt,rdt,rlt;
OrthographicCamera camera;
public Viewport viewport;
private ShowScreen screen = ShowScreen.login;
BitmapFont font,font2;
float rh, rw;
float startpos[][][] = new float[9][9][2]; //grid positions: [row][column][x/y]
float fig_pos[][] = new float[32][6]; //[piece id][x/y/newx/newy/movingx/movingy]
int fig_laukelis[][] = new int[32][2]; //[piece id][row/column]
//for moving the pieces
int oldRow;
int newRow;
int oldCol;
int newCol;
ShapeRenderer shape; //renders the shapes used as buttons
GlyphLayout layout;
//test string
String test = "move example 1";
@Override
public void create() {
//some variables need to be initialized again here to create the empty arrays
batch = new SpriteBatch();
shape = new ShapeRenderer();
font = new BitmapFont(); //for drawing text
font2 = new BitmapFont(); //for drawing text on the board
camera = new OrthographicCamera(); //set the camera size
viewport = new FitViewport(Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), camera); //fit it to the screen
viewport.apply(); //apply() is needed, otherwise the viewport size is not kept
//get the ratios
rw = (float) Gdx.graphics.getWidth() / (float) V_WIDTH;
rh = (float) Gdx.graphics.getHeight() / (float) V_HEIGHT;
camera.position.set(camera.viewportWidth / 2, camera.viewportHeight / 2, 0); //center the camera on its view
//load the images
//first load them into the manager
bdt = new Texture("chess_fig/chess_bdt60.png");
blt = new Texture("chess_fig/chess_blt60.png");
kdt = new Texture("chess_fig/chess_kdt60.png");
klt = new Texture("chess_fig/chess_klt60.png");
ndt = new Texture("chess_fig/chess_ndt60.png");
nlt = new Texture("chess_fig/chess_nlt60.png");
pdt = new Texture("chess_fig/chess_pdt60.png");
plt = new Texture("chess_fig/chess_plt60.png");
qdt = new Texture("chess_fig/chess_qdt60.png");
qlt = new Texture("chess_fig/chess_qlt60.png");
rdt = new Texture("chess_fig/chess_rdt60.png");
rlt = new Texture("chess_fig/chess_rlt60.png");
//font
FreeTypeFontGenerator generator = new FreeTypeFontGenerator(Gdx.files.internal("comics_bold.ttf")); //you can change this to any other font you like
FreeTypeFontGenerator.FreeTypeFontParameter parameter = new FreeTypeFontGenerator.FreeTypeFontParameter();
parameter.size = (int) (50*(rw/rh)); //size
font = generator.generateFont(parameter); //store the generated font
font.setColor(255 / 255f, 255 / 255f, 255 / 255f, 1);
parameter.size = (int)(150*(rw/rh));
font2 = generator.generateFont(parameter);
font2.setColor(Color.BLACK);
//build the grid
for(int stulpeis = 0;stulpeis<9;stulpeis++){
for(int eile = 0;eile<9;eile++){
startpos[eile][stulpeis][0]=0+(camera.viewportWidth/9*stulpeis);//x
startpos[eile][stulpeis][1]=0+(camera.viewportWidth/9*eile);//y
}
}
}
private void movefig(String coord) {
//read the string and assign the correct values: the move arrives as
//column letter, row digit, column letter, row digit (e.g. "A2A4")
char c0 = coord.charAt(0);
char c1 = coord.charAt(1);
char c2 = coord.charAt(2);
char c3 = coord.charAt(3);
if (c0 >= 'A' && c0 <= 'H') {
oldCol = c0 - 'A' + 1;
}
if (c1 >= '1' && c1 <= '8') {
oldRow = c1 - '0';
}
if (c2 >= 'A' && c2 <= 'H') {
newCol = c2 - 'A' + 1;
}
if (c3 >= '1' && c3 <= '8') {
newRow = c3 - '0';
}
//find the piece that is standing on the old square
for (int f = 0; f < 32; f++) {
if (fig_laukelis[f][0] == oldRow && fig_laukelis[f][1] == oldCol) {
//not sure what happened here, but the rows and columns get mixed up ;/
fig_pos[f][2] = (camera.viewportWidth / 9) * (newCol);
fig_pos[f][3] = (camera.viewportWidth / 9) * (newRow);
//update the piece's square
fig_laukelis[f][0] = newRow;
fig_laukelis[f][1] = newCol;
//found it, so end the loop
break;
}
}
}
}
What I need is to call the movefig method from the handler.post(new Runnable() { ... }) part of AndroidLauncher when I receive data over Bluetooth.
Make movefig() of MyGdxGame public so that it can be accessed from any package.
MyGdxGame gdxGame;
@Override
protected void onCreate (Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
AndroidApplicationConfiguration config = new AndroidApplicationConfiguration();
gdxGame = new MyGdxGame();
View gameView = initializeForView(gdxGame, config);
setContentView(gameView);
}
Now you have a reference to MyGdxGame, so you can call movefig() inside the AndroidLauncher class.
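A minimal sketch of the call site, assuming the incoming command is (after stripping any marker such as the leading 'm') the "A2A4" format that movefig() parses:
handler.post(new Runnable() {
    public void run() {
        //forward the parsed Bluetooth command to the core class
        gdxGame.movefig(data);
    }
});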
For the opposite direction you need an interface through which the core module can call into the Android module.
Create an interface inside your core module:
public interface Service {
void doSomething();
}
Implement this interface in the AndroidLauncher class:
public class AndroidLauncher extends AndroidApplication implements Service {
MyGdxGame gdxGame;
@Override
protected void onCreate (Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
AndroidApplicationConfiguration config = new AndroidApplicationConfiguration();
gdxGame = new MyGdxGame(this);
View gameView = initializeForView(gdxGame, config);
setContentView(gameView);
}
@Override
public void doSomething() {
//do what you want
}
...
}
Keep a reference to the Service implementation that comes from the platform, so create a parameterised constructor for MyGdxGame:
public class MyGdxGame extends ApplicationAdapter {
Service service;
public MyGdxGame(Service service){
this.service=service;
}
...
}
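For the button direction, a minimal sketch of the core side; since the screens in the question are drawn with ShapeRenderer, the "button" here is assumed to be a rectangle you hit-test yourself:
//somewhere in MyGdxGame's render/input handling
if (Gdx.input.justTouched()) {
    //hypothetical hit-test of the drawn button rectangle goes here
    service.doSomething(); //runs the implementation inside AndroidLauncher
}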
Related
I am new to Android. I am developing an application which uses AudioRecord and AudioTrack for recording and playing. In addition to this, I am trying to read and display the amplitude values of the recorded sound.
Here is my class
public class MainActivity extends AppCompatActivity {
private static final String TAG = "RecordSound";
private int BufferSize;
byte[] buffer = new byte[BufferSize];
/* AudioRecord and AudioTrack Object */
private AudioRecord record = null;
private AudioTrack track = null;
/* Audio Configuration */
private int sampleRate = 8000;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean isRecording = true;
private Thread recordingThread = null;
private double lastLevel = 0;
private Thread thread;
private static final int SAMPLE_DELAY = 75;
MediaPlayer mediaPlayer;
RelativeLayout layout;
private ImageView tankHolder;
TextView text;
Button play;
String filename;
final Handler mHandler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
tankHolder = (ImageView)findViewById(R.id.tankHolder);
text=(TextView)findViewById(R.id.result_text);
layout=(RelativeLayout)findViewById(R.id.linear);
play=(Button)findViewById(R.id.btnStartPlay);
try {
BufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
}catch (Exception e){
e.printStackTrace();
}
}
@Override
protected void onResume() {
super.onResume();
//calling MediaPlayer for alarmtone when the tank is almost full
mediaPlayer = MediaPlayer.create(this, R.raw.tone);
startRecording();
play.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
stopRecording();
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
private void stopRecording() throws IOException{
if (null != record) {
isRecording = false;
record.stop();
record.release();
record = null;
recordingThread = null;
mHandler.post(runRecord);
mediaPlayer.stop();
mediaPlayer.release();
}
}
final Runnable runRecord=new Runnable() {
@Override
public void run() {
text.setText("working");
try {
PlayShortAudioFileViaAudioTrack("/sdcard/recorded.pcm");
} catch (IOException e) {
e.printStackTrace();
}
}
};
private void PlayShortAudioFileViaAudioTrack(String filePath) throws IOException{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
//Reading the file..
File file = new File(filePath); // for ex. path= "/sdcard/samplesound.pcm" or "/sdcard/samplesound.wav"
byte[] byteData = new byte[(int) file.length()];
Log.d(TAG, (int) file.length()+"");
FileInputStream in = null;
try {
in = new FileInputStream( file );
in.read( byteData );
in.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Set and push to audio track..
int intSize = android.media.AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
Log.d(TAG, intSize+"");
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
at.play();
// Write the byte array to the track
at.write(byteData, 0, byteData.length);
at.stop();
at.release();
}
private void startRecording()
{
record = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
channelConfig, audioFormat, BufferSize);
if (AudioRecord.STATE_INITIALIZED == record.getState())
record.startRecording();
isRecording = true;
/* Run a thread for Recording */
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
thread = new Thread(new Runnable() {
public void run() {
while(thread != null && !thread.isInterrupted()){
//let the thread sleep for the approximate sampling time
try {
Thread.sleep(SAMPLE_DELAY);
} catch (InterruptedException ie) {
ie.printStackTrace();
}
readAudioBuffer();//After this call we can get the last value assigned to the lastLevel variable
runOnUiThread(new Runnable() {
@Override
public void run() {
if(lastLevel >= 7 && lastLevel <= 15){
text.setTextColor(getResources().getColor(R.color.Blue));
layout.setBackgroundColor(Color.RED);
tankHolder.setImageResource(R.drawable.ftank);
if (!mediaPlayer.isPlaying()){
mediaPlayer.start();
}
text.setText(String.valueOf(lastLevel));
}else
if(lastLevel > 50 && lastLevel <= 100){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Orange));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.htank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}else
if(lastLevel > 100 && lastLevel <= 170){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Yellow));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.qtank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}
if(lastLevel > 170){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Blue));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.qtank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}
}
});
}
}
},"AudioRecorder Thread");
thread.start();
}
private void writeAudioDataToFile()
{
byte data[] = new byte[BufferSize];
/* Record audio to following file */
String filePath = "/sdcard/recorded.pcm";
filename = Environment.getExternalStorageDirectory().getAbsolutePath();
filename +="/audiofile.pcm";
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read_bytes = 0;
if(null != os){
while(isRecording)
{
read_bytes = record.read(data, 0, BufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read_bytes){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void readAudioBuffer() {
try {
short[] buffer = new short[BufferSize];
int bufferReadResult = 1;
if (record != null) {
// Sense the voice...
bufferReadResult = record.read(buffer, 0, BufferSize);
double sumLevel = 0;
for (int i = 0; i < bufferReadResult; i++) {
sumLevel += buffer[i];
}
lastLevel = Math.abs((sumLevel / bufferReadResult));
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
If I run it, the runOnUiThread(new Runnable() { ... }) part works, but on pressing the play button the recorded audio does not play.
If I remove runOnUiThread(new Runnable() { ... }), then the recorded audio plays on button press without any problem.
So I think the problem is with the threads. I tried to implement the Handler class, but I am still not getting it.
Refer to this for audio recording:
http://www.java2s.com/Code/Android/Media/UsingAudioRecord.htm
If that is not enough, https://www.tutorialspoint.com/android/android_audio_capture.htm has a more detailed explanation.
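Another thing to try, a minimal sketch under the assumption that the amplitude-monitoring thread is the culprit: it keeps calling record.read() after stopRecording() has released the AudioRecord, so stop it before starting playback (ideally also make the 'thread' field volatile):
private void stopRecording() throws IOException {
    if (record != null) {
        isRecording = false;
        if (thread != null) {
            Thread monitor = thread;
            thread = null; //the monitoring loop checks 'thread != null' and exits
            monitor.interrupt();
        }
        record.stop();
        record.release();
        record = null;
        recordingThread = null;
        mHandler.post(runRecord);
        mediaPlayer.stop();
        mediaPlayer.release();
    }
}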
I'm trying to convert 3GP to MP3 using LAME. I get a byte buffer after decoding the file with the built-in Android decoder. After this I write it into a RAW file with an OutputStream and then create the MP3 using LAME. But it doesn't work: there is only noise in the file. Here is my code. Thanks for any help in advance.
public class MainActivityLame extends Activity {
static {
System.loadLibrary("mp3lame");
}
private native void initEncoder(int numChannels, int sampleRate, int bitRate, int mode, int quality);
private native void destroyEncoder();
private native int encodeFile(String sourcePath, String targetPath);
public static final int NUM_CHANNELS = 1;
public static final int SAMPLE_RATE = 44100;
public static final int BITRATE = 128;
public static final int MODE = 1;
public static final int QUALITY = 2;
private AudioRecord mRecorder;
private short[] mBuffer;
private File mRawFile;
private File mEncodedFile;
private TextView mTextViewFile;
private String strFile;
private AudioTrack audioTrack;
private int mChannels;
private ShortBuffer mDecodedSamples;
private ByteBuffer mDecodedBytes;
private int mFileSize;
private int bufferSize;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mTextViewFile = (TextView) findViewById(R.id.textViewFile);
initRecorder();
initEncoder(NUM_CHANNELS, SAMPLE_RATE, BITRATE, MODE, QUALITY);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
public void onClickFolder(View view) throws IOException {
Intent questionIntent = new Intent(MainActivityLame.this, MyListActivity.class);
startActivityForResult(questionIntent, 1);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
String itemName = data.getStringExtra(MyListActivity.ITEM);
mTextViewFile.setText(itemName);
} else {
mTextViewFile.setText("");
}
}
@Override
public void onDestroy() {
mRecorder.release();
destroyEncoder();
super.onDestroy();
}
private void initRecorder() {
bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(), time.format("%Y%m%d%H%M%S") + "." + suffix);
}
public void onClickExample(View view) {
strFile = mTextViewFile.getText().toString().trim();
MediaExtractor extractor = new MediaExtractor();
MediaFormat format = null;
extractor.setDataSource(strFile);
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; ++i) {
format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
break;
}
}
MediaCodec codec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
codec.configure(format, null, null, 0);
codec.start();
int decodedSamplesSize = 0;
byte[] decodedSamples = null;
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
int sample_size;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long presentation_time;
int tot_size_read = 0;
boolean done_reading = false;
mDecodedBytes = ByteBuffer.allocate(1 << 20);
while (true) {
int inputBufferId = codec.dequeueInputBuffer(100);
if (!done_reading && inputBufferId >= 0) {
sample_size = extractor.readSampleData(inputBuffers[inputBufferId], 0);
if (sample_size < 0) {
codec.queueInputBuffer(inputBufferId, 0, 0, -1, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
done_reading = true;
} else {
presentation_time = extractor.getSampleTime();
codec.queueInputBuffer(inputBufferId, 0, sample_size, presentation_time, 0);
extractor.advance();
tot_size_read += sample_size;
}
}
int outputBufferId = codec.dequeueOutputBuffer(info, 100);
if (outputBufferId >= 0 && info.size > 0) {
if (decodedSamplesSize < info.size) {
decodedSamplesSize = info.size;
decodedSamples = new byte[decodedSamplesSize];
}
outputBuffers[outputBufferId].get(decodedSamples, 0, info.size);
outputBuffers[outputBufferId].clear();
if (mDecodedBytes.remaining() < info.size) {
int position = mDecodedBytes.position();
int newSize = (int) ((position * (1.0 * mFileSize / tot_size_read)) * 1.2);
if (newSize - position < info.size + 5 * (1 << 20)) {
newSize = position + info.size + 5 * (1 << 20);
}
ByteBuffer newDecodedBytes = null;
int retry = 10;
while (retry > 0) {
try {
newDecodedBytes = ByteBuffer.allocate(newSize);
break;
} catch (OutOfMemoryError oome) {
retry--;
}
}
if (retry == 0) {
break;
}
mDecodedBytes.rewind();
newDecodedBytes.put(mDecodedBytes);
mDecodedBytes = newDecodedBytes;
mDecodedBytes.position(position);
}
mDecodedBytes.put(decodedSamples, 0, info.size);
codec.releaseOutputBuffer(outputBufferId, false);
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = codec.getOutputBuffers();
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Subsequent data will conform to new format.
format = codec.getOutputFormat();
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
extractor.release();
extractor = null;
codec.stop();
codec.release();
codec = null;
mDecodedBytes.rewind();
mDecodedBytes.order(ByteOrder.LITTLE_ENDIAN);
mDecodedSamples = mDecodedBytes.asShortBuffer();
mRawFile = getFile("raw");
OutputStream output = null;
try {
output = new BufferedOutputStream(new FileOutputStream(mRawFile));
try {
output.write(mDecodedBytes.array());
} catch (IOException e) {
e.printStackTrace();
}
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
} finally {
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
}
}
}
}
mEncodedFile = getFile("mp3");
int result = encodeFile(mRawFile.getAbsolutePath(), mEncodedFile.getAbsolutePath());
if (result == 0) {
Toast.makeText(MainActivityLame.this, "Encoded to " + mEncodedFile.getName(), Toast.LENGTH_SHORT).show();
}
}
}
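Not a full answer, but two things worth checking, both assumptions since the native encodeFile() isn't shown: the decoder's real output format must match the constants passed to initEncoder(), and only the bytes actually decoded should be written to the RAW file (the pre-allocated backing array is larger than the data and its tail is written out too). A sketch:
// before codec.release(): verify the decoded PCM really is 1 channel / 44100 Hz,
// because a sample-rate or channel-count mismatch sounds exactly like noise
MediaFormat outFormat = codec.getOutputFormat();
int actualRate = outFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
int actualChannels = outFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
Log.d("Lame", "decoded PCM: " + actualRate + " Hz, " + actualChannels + " ch");

// when writing the RAW file: write only the decoded bytes
int decodedLength = mDecodedBytes.position(); //capture before rewind()
mDecodedBytes.rewind();
output.write(mDecodedBytes.array(), 0, decodedLength);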
I am using the javaCV library to record video on Android. They have provided a sample VideoRecording activity, but there is a bug which I could not figure out: I don't know what I am doing wrong or what is missing.
package org.bytedeco.javacv.recordactivity;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import java.io.IOException;
import java.nio.ShortBuffer;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import static org.bytedeco.javacpp.opencv_core.*;
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "/mnt/sdcard/stream.flv";
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
/** The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
IplImage[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.main);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.main, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "camera open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "camera preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new IplImage[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for (int i = 0; i < images.length; i++) {
images[i] = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
timestamps[i] = -1;
}
} else if (yuvIplimage == null) {
yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
if (RECORD_LENGTH > 0) {
Log.v(LOG_TAG,"Writing frames");
try {
int firstIndex = imagesIndex % samples.length;
int lastIndex = (imagesIndex - 1) % images.length;
if (imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if (lastIndex < firstIndex) {
lastIndex += images.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if (t >= 0) {
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if (samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if (lastIndex < firstIndex) {
lastIndex += samples.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
recorder.record(samples[i % samples.length]);
}
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if (RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for (int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
if (RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when this thread starts, it never gets set according to this if statement...!!!
// Why? Good question...
if (recording) {
if (RECORD_LENGTH <= 0) try {
recorder.record(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if (RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvIplimage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if (yuvIplimage != null && recording) {
yuvIplimage.getByteBuffer().put(data);
if (RECORD_LENGTH <= 0) try {
Log.v(LOG_TAG,"Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
I am getting this error
The type org.bytedeco.javacpp.avutil$AVFrame cannot be resolved. It is indirectly referenced from required .class files
at the following line in the above code:
if (RECORD_LENGTH <= 0) try {
recorder.record(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
}
at recorder.record(audioData). I don't know what I am doing wrong here. I am new to JavaCV. Any help will be appreciated.
The error message means that a jar containing the class org.bytedeco.javacpp.avutil$AVFrame cannot be found on the Java class path. I would suggest checking the class path and the version of javacv used; maybe there were bugs that are now solved, you never know. There are also some similar discussions on javacv's GitHub account which may or may not be connected to your problem: javacv github, javacv github.
What makes me wonder is why you are putting your class in the "org.bytedeco.javacv.recordactivity" package. "org.bytedeco.javacv" is javacv's own package; you just don't put your own class into a package structure that does not belong to you. It can only cause you trouble in the future, for instance if javacv at some point decides to create a package and class with the same name.
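If the project's dependencies are managed with Maven or Gradle (an assumption; the post doesn't say how it is built), make sure the javacv artifact is accompanied by the matching javacpp and native-preset artifacts (e.g. ffmpeg/opencv) of the same version: avutil$AVFrame lives in those companion jars, which is why the class is only "indirectly referenced".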
I have recently switched over to AsyncTask after reading how efficient it is. It took some time porting my code, but I did it in the end. I have one AsyncTask that runs first and sets up the background image/color from a JSON object. After that, TableRows are set up for all the other fields in the JSON file. The background changes, but the UI does not show up. I placed System.out.println calls inside my AsyncTask and I can see that the code is executed (the onPostExecute method), but none of the UI shows up. I have no idea what the problem is, hence this question.
Here is my AsyncTask:
class GetImageAsync extends AsyncTask<String, Void, Drawable> {
private final WeakReference<View> ViewReference;
private String data;
private Context context;
private boolean isImageView;
private boolean scale;
private int id = -1;
private int color = -1;
private int height = -1;
public GetImageAsync(TableLayout tr, String data, Context context) {
isImageView = false;
this.context = context;
this.data = data;
ViewReference = new WeakReference<View>(tr);
System.out.println("inside async");
}
public GetImageAsync(ImageView iv, int id, Context context) {
isImageView = true;
this.context = context;
this.id = id;
ViewReference = new WeakReference<View>(iv);
}
public GetImageAsync(ImageView imageView,String data, Context context, int height) {
System.out.println("profile async");
this.height = height;
isImageView = true;
this.context = context;
this.data = data;
ViewReference = new WeakReference<View>(imageView);
}
// Decode image in background.
@Override
protected Drawable doInBackground(String... params) {
System.out.println(id + " : " + isImageView);
if(isImageView && id != -1) {
return getImageFromResource();
} else {
if(data.startsWith("#")) {
color = Color.parseColor(data);
return null;
}
try {
return getImageFromWeb(data, scale);
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
private Drawable getImageFromResource() {
return context.getResources().getDrawable(id);
}
private Drawable getImageFromWeb(String data, boolean b) throws MalformedURLException, IOException {
Bitmap img = BitmapFactory.decodeStream(new URL(data).openConnection().getInputStream());
if(height != -1) {
return new BitmapDrawable(context.getResources(), getCroppedBitmap(Bitmap.createScaledBitmap(
Bitmap.createScaledBitmap(img,
img.getWidth(),
img.getWidth(), true),
height * 2,
height * 2, true)));
}
return new BitmapDrawable(context.getResources(), img);
}
// Once complete, see if ImageView is still around and set bitmap.
@Override
protected void onPostExecute(Drawable bitmap) {
if (isImageView) { //this block does not run
System.out.println("post "+isImageView);
System.out.println("iv");
if (ViewReference != null && bitmap != null) {
final ImageView imageView = (ImageView) ViewReference.get();
if (imageView != null) {
imageView.setImageDrawable(bitmap);
}
}
} else { //this block runs
System.out.println("post "+isImageView);
if (ViewReference != null) {
final TableLayout tr = (TableLayout) ViewReference.get();
if (tr != null) {
if(color != -1) {
tr.setBackgroundColor(color);
} else {
tr.setBackground(bitmap);
}
}
}
}
}
public static Bitmap getCroppedBitmap(Bitmap bitmap) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
int radius = Math.min(h / 2, w / 2);
Bitmap output = Bitmap.createBitmap(w + 8, h + 8, Config.ARGB_8888);
Paint p = new Paint();
p.setAntiAlias(true);
Canvas c = new Canvas(output);
c.drawARGB(0, 0, 0, 0);
p.setStyle(Style.FILL);
c.drawCircle((w / 2) + 4, (h / 2) + 4, radius, p);
p.setXfermode(new PorterDuffXfermode(Mode.SRC_IN));
c.drawBitmap(bitmap, 4, 4, p);
p.setXfermode(null);
p.setStyle(Style.STROKE);
p.setColor(Color.BLACK);
p.setStrokeWidth(1);
c.drawCircle((w / 2) + 4, (h / 2) + 4, radius, p);
return output;
}
}
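(Aside, probably not the bug itself but relevant to the efficiency motivation: since API 11, AsyncTask#execute runs tasks serially on a single background thread, so each of these image downloads waits for the previous one to finish. They can be run in parallel explicitly:)
new GetImageAsync(iv, id, getApplicationContext())
        .executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, "");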
and here is how I create the UI:
private void createUI(JSONObject jObject) throws JSONException {
int absIndex = 0;
if (android.os.Build.VERSION.SDK_INT >= 16) {
new GetImageAsync(tr, jObject.getString(("bg")), getApplicationContext()).execute("");
System.out.println("after async");
/*if (bgcolor != -1) {
System.out.println("color: " + bgcolor);
tr.setBackgroundColor(bgcolor);
} else if (bgpic != null) {
tr.setBackground(bgpic);
}*/
}
for (int i = 0; i < keys.length; i++) {
values.add(i, new ArrayList<String>());
values.get(i).add(0, keys[i]);
values.get(i).add(1, jObject.getString(keys[i]));
}
String lastString = WhoIsLast(jObject);
trcard.setBackground(getResources()
.getDrawable(R.drawable.bg_card/* abc_menu_dropdown_panel_holo_light */));
trcard.addView(tlcard);
tr.setPadding(0, 0, 0, 0);
TableLayout.LayoutParams tableRowParams = new TableLayout.LayoutParams(
TableLayout.LayoutParams.MATCH_PARENT,
TableLayout.LayoutParams.WRAP_CONTENT);
int[] attrs = { android.R.attr.dividerVertical };
TypedArray typedArray = getApplicationContext().obtainStyledAttributes(
attrs);
Drawable divider = typedArray.getDrawable(0);
typedArray.recycle();
tableRowParams.setMargins(20, 20, 20, 0);
trcard.setLayoutParams(tableRowParams);
tlcard.setDividerDrawable(divider);
tlcard.setDividerPadding(4);
tableScrollView.addView(trcard);
for (int i = 0; i < keys.length; i++) {
String value = values.get(i).get(1);
if (value != "") {
String key = values.get(i).get(0);
boolean last = false;
if (i == keys.length || value.equals(lastString) || i == 0) {
last = true;
System.out.println(value +" = "+lastString);
}
insertElement(value, key, absIndex++, last, drawables[i]);
if (!last) {
absIndex++;
}
}
}
}
private String WhoIsLast(JSONObject j) throws JSONException {
if (j.getString("facebook").equals("")) {
return "";
}
if (j.getString("twitter").equals("")) {
return "facebook";
}
if (j.getString("email").equals("")) {
return "twitter";
}
if (j.getString("phone").equals("")) {
return "email";
}
return "";
}
int absIndex = 0;
private void insertElement(String data, String key, int i,
boolean b, Integer id) throws JSONException {
LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View newRow = inflater.inflate(R.layout.row, null, false);
newRow.setLayoutParams(new TableRow.LayoutParams(
TableRow.LayoutParams.MATCH_PARENT,
TableRow.LayoutParams.WRAP_CONTENT));
TextView dataTextView = (TextView) newRow
.findViewById(R.id.rowTextView);
dataTextView.setText("\t " + data);
ImageView iv = (ImageView) newRow.findViewById(R.id.rowImageView);
newRow.setId(absIndex++);
newRow.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
//omitted
}
});
View v = new View(this);
v.setLayoutParams(new TableRow.LayoutParams(
TableRow.LayoutParams.MATCH_PARENT, 1));
v.setBackgroundColor(Color.argb(25, 111, 111, 111));
dataTextView.measure(MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED);
if (i == 0) {
System.out.println("before async iv");
new GetImageAsync(iv, jObject.getString("profilepic"), getApplicationContext(), dataTextView.getMeasuredHeight()).execute("");
System.out.println("after async iv");
//async.execute("");
dataTextView.setShadowLayer(3, 0, 0, Color.BLACK);
dataTextView.setTextColor(getResources().getColor(R.color.white));
} else {
new GetImageAsync(iv, id, getApplicationContext()).execute("");
}
if (i == 0) {
dataTextView.setTextSize(30);
tableScrollView.addView(newRow, i);
System.out.println("adding i=0null");
} else {
System.out.println("adding i = "+i +tlcard);
tlcard.addView(newRow, i - 1);
if (!b) {
tlcard.addView(v, i);
}
}
}
Edit: here is where I call the createUI method:
final Thread thread = new Thread(new Runnable() {
@Override
public void run() {
jObject = getJson("http://www.tabcards.com/req/androidapi/L2o30H8JlFMtFYHW3KLxkts20ztc5Be6Z6m6v315/json/"
+ value);
System.out.println(value);
if (jObject != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
try {
createUI(jObject);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
}
});
thread.start();
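One unrelated issue in createUI() worth fixing while you are in there: value != "" compares references, not contents, and strings coming out of JSONObject.getString() are not interned, so the check is effectively always true and empty rows get inserted. Prefer:
if (!value.isEmpty()) { //or !value.equals("")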
I want to use a library in my Android project.
When I use it as an external library, everything works.
But when I add the .jar to my "libs" folder, I get this error:
04-08 10:39:47.879: E/AndroidRuntime(13453): java.lang.RuntimeException: Unable to start activity ComponentInfo{fr.allbrary/eu.janmuller.android.simplecropimage.CropImage}: java.lang.NullPointerException
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2245)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2295)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.access$700(ActivityThread.java:150)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1280)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.os.Handler.dispatchMessage(Handler.java:99)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.os.Looper.loop(Looper.java:176)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.main(ActivityThread.java:5279)
04-08 10:39:47.879: E/AndroidRuntime(13453): at java.lang.reflect.Method.invokeNative(Native Method)
04-08 10:39:47.879: E/AndroidRuntime(13453): at java.lang.reflect.Method.invoke(Method.java:511)
04-08 10:39:47.879: E/AndroidRuntime(13453): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1102)
04-08 10:39:47.879: E/AndroidRuntime(13453): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:869)
04-08 10:39:47.879: E/AndroidRuntime(13453): at dalvik.system.NativeStart.main(Native Method)
04-08 10:39:47.879: E/AndroidRuntime(13453): Caused by: java.lang.NullPointerException
04-08 10:39:47.879: E/AndroidRuntime(13453): at eu.janmuller.android.simplecropimage.CropImage.onCreate(CropImage.java:167)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.Activity.performCreate(Activity.java:5267)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1097)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2209)
04-08 10:39:47.879: E/AndroidRuntime(13453): ... 11 more
Code:
/**
* The activity can crop specific region of interest from an image.
*/
public class CropImage extends MonitoredActivity {
final int IMAGE_MAX_SIZE = 1024;
private static final String TAG = "CropImage";
public static final String IMAGE_PATH = "image-path";
public static final String SCALE = "scale";
public static final String ORIENTATION_IN_DEGREES = "orientation_in_degrees";
public static final String ASPECT_X = "aspectX";
public static final String ASPECT_Y = "aspectY";
public static final String OUTPUT_X = "outputX";
public static final String OUTPUT_Y = "outputY";
public static final String SCALE_UP_IF_NEEDED = "scaleUpIfNeeded";
public static final String CIRCLE_CROP = "circleCrop";
public static final String RETURN_DATA = "return-data";
public static final String RETURN_DATA_AS_BITMAP = "data";
public static final String ACTION_INLINE_DATA = "inline-data";
// These are various options can be specified in the intent.
private Bitmap.CompressFormat mOutputFormat = Bitmap.CompressFormat.JPEG;
private Uri mSaveUri = null;
private boolean mDoFaceDetection = true;
private boolean mCircleCrop = false;
private final Handler mHandler = new Handler();
private int mAspectX;
private int mAspectY;
private int mOutputX;
private int mOutputY;
private boolean mScale;
private CropImageView mImageView;
private ContentResolver mContentResolver;
private Bitmap mBitmap;
private String mImagePath;
boolean mWaitingToPick; // Whether we are wait the user to pick a face.
boolean mSaving; // Whether the "save" button is already clicked.
HighlightView mCrop;
// These options specifiy the output image size and whether we should
// scale the output to fit it (or just crop it).
private boolean mScaleUp = true;
private final BitmapManager.ThreadSet mDecodingThreads =
new BitmapManager.ThreadSet();
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
mContentResolver = getContentResolver();
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.cropimage);
mImageView = (CropImageView) findViewById(R.id.image);
showStorageToast(this);
Intent intent = getIntent();
Bundle extras = intent.getExtras();
if (extras != null) {
if (extras.getString(CIRCLE_CROP) != null) {
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
mImageView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
}
mCircleCrop = true;
mAspectX = 1;
mAspectY = 1;
}
mImagePath = extras.getString(IMAGE_PATH);
mSaveUri = getImageUri(mImagePath);
mBitmap = getBitmap(mImagePath);
if (extras.containsKey(ASPECT_X) && extras.get(ASPECT_X) instanceof Integer) {
mAspectX = extras.getInt(ASPECT_X);
} else {
throw new IllegalArgumentException("aspect_x must be integer");
}
if (extras.containsKey(ASPECT_Y) && extras.get(ASPECT_Y) instanceof Integer) {
mAspectY = extras.getInt(ASPECT_Y);
} else {
throw new IllegalArgumentException("aspect_y must be integer");
}
mOutputX = extras.getInt(OUTPUT_X);
mOutputY = extras.getInt(OUTPUT_Y);
mScale = extras.getBoolean(SCALE, true);
mScaleUp = extras.getBoolean(SCALE_UP_IF_NEEDED, true);
}
if (mBitmap == null) {
Log.d(TAG, "finish!!!");
finish();
return;
}
// Make UI fullscreen.
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
findViewById(R.id.discard).setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
setResult(RESULT_CANCELED);
finish();
}
});
findViewById(R.id.save).setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
try {
onSaveClicked();
} catch (Exception e) {
finish();
}
}
});
findViewById(R.id.rotateLeft).setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
mBitmap = Util.rotateImage(mBitmap, -90);
RotateBitmap rotateBitmap = new RotateBitmap(mBitmap);
mImageView.setImageRotateBitmapResetBase(rotateBitmap, true);
mRunFaceDetection.run();
}
});
findViewById(R.id.rotateRight).setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
mBitmap = Util.rotateImage(mBitmap, 90);
RotateBitmap rotateBitmap = new RotateBitmap(mBitmap);
mImageView.setImageRotateBitmapResetBase(rotateBitmap, true);
mRunFaceDetection.run();
}
});
startFaceDetection();
}
private Uri getImageUri(String path) {
return Uri.fromFile(new File(path));
}
private Bitmap getBitmap(String path) {
Uri uri = getImageUri(path);
InputStream in = null;
try {
in = mContentResolver.openInputStream(uri);
//Decode image size
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeStream(in, null, o);
in.close();
int scale = 1;
if (o.outHeight > IMAGE_MAX_SIZE || o.outWidth > IMAGE_MAX_SIZE) {
scale = (int) Math.pow(2, (int) Math.round(Math.log(IMAGE_MAX_SIZE / (double) Math.max(o.outHeight, o.outWidth)) / Math.log(0.5)));
}
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = scale;
in = mContentResolver.openInputStream(uri);
Bitmap b = BitmapFactory.decodeStream(in, null, o2);
in.close();
return b;
} catch (FileNotFoundException e) {
Log.e(TAG, "file " + path + " not found");
} catch (IOException e) {
Log.e(TAG, "file " + path + " not found");
}
return null;
}
private void startFaceDetection() {
if (isFinishing()) {
return;
}
mImageView.setImageBitmapResetBase(mBitmap, true);
Util.startBackgroundJob(this, null,
"Please wait\u2026",
new Runnable() {
public void run() {
final CountDownLatch latch = new CountDownLatch(1);
final Bitmap b = mBitmap;
mHandler.post(new Runnable() {
public void run() {
if (b != mBitmap && b != null) {
mImageView.setImageBitmapResetBase(b, true);
mBitmap.recycle();
mBitmap = b;
}
if (mImageView.getScale() == 1F) {
mImageView.center(true, true);
}
latch.countDown();
}
});
try {
latch.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
mRunFaceDetection.run();
}
}, mHandler);
}
private void onSaveClicked() throws Exception {
// TODO this code needs to change to use the decode/crop/encode single
// step api so that we don't require that the whole (possibly large)
// bitmap doesn't have to be read into memory
if (mSaving) return;
if (mCrop == null) {
return;
}
mSaving = true;
Rect r = mCrop.getCropRect();
int width = r.width();
int height = r.height();
// If we are circle cropping, we want alpha channel, which is the
// third param here.
Bitmap croppedImage;
try {
croppedImage = Bitmap.createBitmap(width, height,
mCircleCrop ? Bitmap.Config.ARGB_8888 : Bitmap.Config.RGB_565);
} catch (Exception e) {
throw e;
}
if (croppedImage == null) {
return;
}
{
Canvas canvas = new Canvas(croppedImage);
Rect dstRect = new Rect(0, 0, width, height);
canvas.drawBitmap(mBitmap, r, dstRect, null);
}
if (mCircleCrop) {
// OK, so what's all this about?
// Bitmaps are inherently rectangular but we want to return
// something that's basically a circle. So we fill in the
// area around the circle with alpha. Note the all important
// PortDuff.Mode.CLEAR.
Canvas c = new Canvas(croppedImage);
Path p = new Path();
p.addCircle(width / 2F, height / 2F, width / 2F,
Path.Direction.CW);
c.clipPath(p, Region.Op.DIFFERENCE);
c.drawColor(0x00000000, PorterDuff.Mode.CLEAR);
}
/* If the output is required to a specific size then scale or fill */
if (mOutputX != 0 && mOutputY != 0) {
if (mScale) {
/* Scale the image to the required dimensions */
Bitmap old = croppedImage;
croppedImage = Util.transform(new Matrix(),
croppedImage, mOutputX, mOutputY, mScaleUp);
if (old != croppedImage) {
old.recycle();
}
} else {
/* Don't scale the image crop it to the size requested.
* Create an new image with the cropped image in the center and
* the extra space filled.
*/
// Don't scale the image but instead fill it so it's the
// required dimension
Bitmap b = Bitmap.createBitmap(mOutputX, mOutputY,
Bitmap.Config.RGB_565);
Canvas canvas = new Canvas(b);
Rect srcRect = mCrop.getCropRect();
Rect dstRect = new Rect(0, 0, mOutputX, mOutputY);
int dx = (srcRect.width() - dstRect.width()) / 2;
int dy = (srcRect.height() - dstRect.height()) / 2;
/* If the srcRect is too big, use the center part of it. */
srcRect.inset(Math.max(0, dx), Math.max(0, dy));
/* If the dstRect is too big, use the center part of it. */
dstRect.inset(Math.max(0, -dx), Math.max(0, -dy));
/* Draw the cropped bitmap in the center */
canvas.drawBitmap(mBitmap, srcRect, dstRect, null);
/* Set the cropped bitmap as the new bitmap */
croppedImage.recycle();
croppedImage = b;
}
}
// Return the cropped image directly or save it to the specified URI.
Bundle myExtras = getIntent().getExtras();
if (myExtras != null && (myExtras.getParcelable("data") != null
|| myExtras.getBoolean(RETURN_DATA))) {
Bundle extras = new Bundle();
extras.putParcelable(RETURN_DATA_AS_BITMAP, croppedImage);
setResult(RESULT_OK,
(new Intent()).setAction(ACTION_INLINE_DATA).putExtras(extras));
finish();
} else {
final Bitmap b = croppedImage;
Util.startBackgroundJob(this, null, getString(R.string.saving_image),
new Runnable() {
public void run() {
saveOutput(b);
}
}, mHandler);
}
}
private void saveOutput(Bitmap croppedImage) {
if (mSaveUri != null) {
OutputStream outputStream = null;
try {
outputStream = mContentResolver.openOutputStream(mSaveUri);
if (outputStream != null) {
croppedImage.compress(mOutputFormat, 90, outputStream);
}
} catch (IOException ex) {
Log.e(TAG, "Cannot open file: " + mSaveUri, ex);
setResult(RESULT_CANCELED);
finish();
return;
} finally {
Util.closeSilently(outputStream);
}
Bundle extras = new Bundle();
Intent intent = new Intent(mSaveUri.toString());
intent.putExtras(extras);
intent.putExtra(IMAGE_PATH, mImagePath);
intent.putExtra(ORIENTATION_IN_DEGREES, Util.getOrientationInDegree(this));
setResult(RESULT_OK, intent);
} else {
Log.e(TAG, "not defined image url");
}
croppedImage.recycle();
finish();
}
@Override
protected void onPause() {
super.onPause();
BitmapManager.instance().cancelThreadDecoding(mDecodingThreads);
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mBitmap != null) {
mBitmap.recycle();
}
}
Runnable mRunFaceDetection = new Runnable() {
@SuppressWarnings("hiding")
float mScale = 1F;
Matrix mImageMatrix;
FaceDetector.Face[] mFaces = new FaceDetector.Face[3];
int mNumFaces;
// For each face, we create a HightlightView for it.
private void handleFace(FaceDetector.Face f) {
PointF midPoint = new PointF();
int r = ((int) (f.eyesDistance() * mScale)) * 2;
f.getMidPoint(midPoint);
midPoint.x *= mScale;
midPoint.y *= mScale;
int midX = (int) midPoint.x;
int midY = (int) midPoint.y;
HighlightView hv = new HighlightView(mImageView);
int width = mBitmap.getWidth();
int height = mBitmap.getHeight();
Rect imageRect = new Rect(0, 0, width, height);
RectF faceRect = new RectF(midX, midY, midX, midY);
faceRect.inset(-r, -r);
if (faceRect.left < 0) {
faceRect.inset(-faceRect.left, -faceRect.left);
}
if (faceRect.top < 0) {
faceRect.inset(-faceRect.top, -faceRect.top);
}
if (faceRect.right > imageRect.right) {
faceRect.inset(faceRect.right - imageRect.right,
faceRect.right - imageRect.right);
}
if (faceRect.bottom > imageRect.bottom) {
faceRect.inset(faceRect.bottom - imageRect.bottom,
faceRect.bottom - imageRect.bottom);
}
hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop,
mAspectX != 0 && mAspectY != 0);
mImageView.add(hv);
}
// Create a default HightlightView if we found no face in the picture.
private void makeDefault() {
HighlightView hv = new HighlightView(mImageView);
int width = mBitmap.getWidth();
int height = mBitmap.getHeight();
Rect imageRect = new Rect(0, 0, width, height);
// make the default size about 4/5 of the width or height
int cropWidth = Math.min(width, height) * 4 / 5;
int cropHeight = cropWidth;
if (mAspectX != 0 && mAspectY != 0) {
if (mAspectX > mAspectY) {
cropHeight = cropWidth * mAspectY / mAspectX;
} else {
cropWidth = cropHeight * mAspectX / mAspectY;
}
}
int x = (width - cropWidth) / 2;
int y = (height - cropHeight) / 2;
RectF cropRect = new RectF(x, y, x + cropWidth, y + cropHeight);
hv.setup(mImageMatrix, imageRect, cropRect, mCircleCrop,
mAspectX != 0 && mAspectY != 0);
mImageView.mHighlightViews.clear(); // Thong added for rotate
mImageView.add(hv);
}
// Scale the image down for faster face detection.
private Bitmap prepareBitmap() {
if (mBitmap == null) {
return null;
}
// 256 pixels wide is enough.
if (mBitmap.getWidth() > 256) {
mScale = 256.0F / mBitmap.getWidth();
}
Matrix matrix = new Matrix();
matrix.setScale(mScale, mScale);
return Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true);
}
public void run() {
mImageMatrix = mImageView.getImageMatrix();
Bitmap faceBitmap = prepareBitmap();
mScale = 1.0F / mScale;
if (faceBitmap != null && mDoFaceDetection) {
FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
faceBitmap.getHeight(), mFaces.length);
mNumFaces = detector.findFaces(faceBitmap, mFaces);
}
if (faceBitmap != null && faceBitmap != mBitmap) {
faceBitmap.recycle();
}
mHandler.post(new Runnable() {
public void run() {
mWaitingToPick = mNumFaces > 1;
if (mNumFaces > 0) {
for (int i = 0; i < mNumFaces; i++) {
handleFace(mFaces[i]);
}
} else {
makeDefault();
}
mImageView.invalidate();
if (mImageView.mHighlightViews.size() == 1) {
mCrop = mImageView.mHighlightViews.get(0);
mCrop.setFocus(true);
}
if (mNumFaces > 1) {
Toast.makeText(CropImage.this,
"Multi face crop help",
Toast.LENGTH_SHORT).show();
}
}
});
}
};
public static final int NO_STORAGE_ERROR = -1;
public static final int CANNOT_STAT_ERROR = -2;
public static void showStorageToast(Activity activity) {
showStorageToast(activity, calculatePicturesRemaining(activity));
}
public static void showStorageToast(Activity activity, int remaining) {
String noStorageText = null;
if (remaining == NO_STORAGE_ERROR) {
String state = Environment.getExternalStorageState();
if (state.equals(Environment.MEDIA_CHECKING)) {
noStorageText = activity.getString(R.string.preparing_card);
} else {
noStorageText = activity.getString(R.string.no_storage_card);
}
} else if (remaining < 1) {
noStorageText = activity.getString(R.string.not_enough_space);
}
if (noStorageText != null) {
Toast.makeText(activity, noStorageText, 5000).show();
}
}
public static int calculatePicturesRemaining(Activity activity) {
try {
/*if (!ImageManager.hasStorage()) {
return NO_STORAGE_ERROR;
} else {*/
String storageDirectory = "";
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
storageDirectory = Environment.getExternalStorageDirectory().toString();
}
else {
storageDirectory = activity.getFilesDir().toString();
}
StatFs stat = new StatFs(storageDirectory);
float remaining = ((float) stat.getAvailableBlocks()
* (float) stat.getBlockSize()) / 400000F;
return (int) remaining;
//}
} catch (Exception ex) {
// if we can't stat the filesystem then we don't know how many
// pictures are remaining. it might be zero but just leave it
// blank since we really don't know.
return CANNOT_STAT_ERROR;
}
}
}
Why?
Add the jar file using the following procedure:
Right click on the project ---> Build Path ---> Configure Build Path... ---> on the left side choose Java Build Path ---> Libraries ---> Add External JARs ---> OK ---> OK
Hope this resolves the issue.
This is a build path issue.
Make sure your bin folder is not included in your build path.
Right click on your project > go to properties > Build Path.
Make sure that the Honeycomb library is in your libs/ folder and not in your source folder.
Include the libraries in libs/ individually in the build path.
BTW, you may want to bring in the android-support-v4 library to get Ice Cream Sandwich support instead of the Honeycomb support library.
You can't simply add the jar files to your app. Go to the build path and add them as external JARs; then it will work.
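One more thing worth checking, an assumption based on the stack trace pointing at CropImage.onCreate(): a plain .jar cannot carry Android resources, so if R.layout.cropimage and its views ship with the library rather than with your app, findViewById() returns null and produces exactly this NullPointerException. In that case reference the code as an Android library project (or copy its res/ files into your app) instead of packaging it as a .jar.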