I am trying to find a way to show an upload's progress in a ProgressBar along with the transfer percentage.
Here's my code. It transfers the file, but there is no progress bar.
package com.example.christian.progressbar5;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.util.Base64;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.android.volley.AuthFailureError;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
public class MainActivity extends AppCompatActivity {
ImageView ivImage;
Button btnUpload;
ProgressBar progressBar;
EditText etFilename;
final int GALLERY_REQUEST = 38473;
Bitmap bitmap;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
etFilename = (EditText) findViewById(R.id.etFilename);
ivImage = (ImageView) findViewById(R.id.ivImage);
btnUpload = (Button) findViewById(R.id.btnUpload);
progressBar = (ProgressBar) findViewById(R.id.progressBar);
ivImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(intent, GALLERY_REQUEST);
}
});
btnUpload.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
uploadImage();
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if(resultCode == RESULT_OK) {
if(requestCode == GALLERY_REQUEST){
Uri uri = data.getData();
try {
bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), uri);
ivImage.setImageBitmap(bitmap);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
private void uploadImage()
{
final String filename = etFilename.getText().toString();
RequestQueue queue = Volley.newRequestQueue(MainActivity.this);
String URL = "http://inventsystem.esy.es/uploadimage2.php";
if(bitmap == null){
Toast.makeText(this, "No Image", Toast.LENGTH_SHORT).show();
return;
}
if(filename.equals("")){
Toast.makeText(this, "What's the filename?", Toast.LENGTH_SHORT).show();
return;
}
Bitmap resized = getResizedBitmap(bitmap, 500);
final String photo = ImageToString(resized);
StringRequest stringRequest = new StringRequest(Request.Method.POST, URL, new Response.Listener<String>() {
@Override
public void onResponse(String response) {
Toast.makeText(MainActivity.this, "Image has been Uploaded", Toast.LENGTH_SHORT).show();
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
Toast.makeText(MainActivity.this, "Something went wrong...", Toast.LENGTH_SHORT).show();
}
}){
@Override
protected Map<String, String> getParams() throws AuthFailureError {
Map<String, String> params = new HashMap<>();
params.put("image", photo);
params.put("name", filename);
return params;
}
};
queue.add(stringRequest);
}
private String ImageToString(Bitmap bitmap)
{
ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG,100,byteArray);
byte[] imgBytes = byteArray.toByteArray();
return Base64.encodeToString(imgBytes,Base64.DEFAULT);
}
public Bitmap getResizedBitmap(Bitmap image, int maxSize) {
int width = image.getWidth();
int height = image.getHeight();
float bitmapRatio = (float)width / (float) height;
if (bitmapRatio > 1) {
width = maxSize;
height = (int) (width / bitmapRatio);
} else {
height = maxSize;
width = (int) (height * bitmapRatio);
}
return Bitmap.createScaledBitmap(image, width, height, true);
}
}
You can use VolleyPlus instead of Volley to add this feature.
jsonRequest.setOnProgressListener(new Response.ProgressListener() {
@Override
public void onProgress(long transferredBytes, long totalSize) {
int percentage = (int) ((transferredBytes / ((float) totalSize)) * 100);
}
});
There's a nice code snippet showing how to do it here:
https://github.com/DWorkS/VolleyPlus/issues/53
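For a rough idea of how that listener could feed the ProgressBar from the question, here is a minimal sketch. It assumes the request in uploadImage() is built with VolleyPlus so that setOnProgressListener is available, and that the ProgressBar keeps its default 0-100 range; the update is posted to the UI thread in case the callback fires on a worker thread.
stringRequest.setOnProgressListener(new Response.ProgressListener() {
    @Override
    public void onProgress(long transferredBytes, long totalSize) {
        // Convert bytes into a percentage of the whole upload
        final int percentage = (int) ((transferredBytes / ((float) totalSize)) * 100);
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                progressBar.setProgress(percentage);
                // A TextView of your own (not in the original layout) could show the number here too
            }
        });
    }
});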
I have the following code that runs in a service and captures screenshots on Android. What technology could I use to send a remote command to this service to trigger the capture? I don't want it to be started from a button in MainActivity. Firebase Cloud Messaging? WebSocket?
ScreenCaptureService.java
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Notification;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.Image;
import android.media.ImageReader;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.util.Log;
import android.view.Display;
import android.view.OrientationEventListener;
import android.view.WindowManager;
import androidx.annotation.RequiresApi;
import androidx.core.util.Pair;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Objects;
public class ScreenCaptureService extends Service {
private static final String TAG = "ScreenCaptureService";
private static final String RESULT_CODE = "RESULT_CODE";
private static final String DATA = "DATA";
private static final String ACTION = "ACTION";
private static final String START = "START";
private static final String STOP = "STOP";
private static final String SCREENCAP_NAME = "screencap";
private static int IMAGES_PRODUCED;
private MediaProjection mMediaProjection;
private String mStoreDir;
private ImageReader mImageReader;
private Handler mHandler;
private Display mDisplay;
private VirtualDisplay mVirtualDisplay;
private int mDensity;
private int mWidth;
private int mHeight;
private int mRotation;
private OrientationChangeCallback mOrientationChangeCallback;
public static Intent getStartIntent(Context context, int resultCode, Intent data) {
Intent intent = new Intent(context, ScreenCaptureService.class);
intent.putExtra(ACTION, START);
intent.putExtra(RESULT_CODE, resultCode);
intent.putExtra(DATA, data);
return intent;
}
public static Intent getStopIntent(Context context) {
Intent intent = new Intent(context, ScreenCaptureService.class);
intent.putExtra(ACTION, STOP);
return intent;
}
private static boolean isStartCommand(Intent intent) {
return intent.hasExtra(RESULT_CODE) && intent.hasExtra(DATA)
&& intent.hasExtra(ACTION) && Objects.equals(intent.getStringExtra(ACTION), START);
}
private static boolean isStopCommand(Intent intent) {
return intent.hasExtra(ACTION) && Objects.equals(intent.getStringExtra(ACTION), STOP);
}
private static int getVirtualDisplayFlags() {
return DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC;
}
private class ImageAvailableListener implements ImageReader.OnImageAvailableListener {
@Override
public void onImageAvailable(ImageReader reader) {
FileOutputStream fos = null;
Bitmap bitmap = null;
try (Image image = mImageReader.acquireLatestImage()) {
if (image != null) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * mWidth;
// create bitmap
bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
// write bitmap to a file
fos = new FileOutputStream(mStoreDir + "/myscreen_" + IMAGES_PRODUCED + ".png");
bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos);
IMAGES_PRODUCED++;
Log.e(TAG, "captured image: " + IMAGES_PRODUCED);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
if (fos != null) {
try {
fos.close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
if (bitmap != null) {
bitmap.recycle();
}
}
}
}
private class OrientationChangeCallback extends OrientationEventListener {
OrientationChangeCallback(Context context) {
super(context);
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void onOrientationChanged(int orientation) {
final int rotation = mDisplay.getRotation();
if (rotation != mRotation) {
mRotation = rotation;
try {
// clean up
if (mVirtualDisplay != null) mVirtualDisplay.release();
if (mImageReader != null) mImageReader.setOnImageAvailableListener(null, null);
// re-create virtual display depending on device width / height
createVirtualDisplay();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private class MediaProjectionStopCallback extends MediaProjection.Callback {
@Override
public void onStop() {
Log.e(TAG, "stopping projection.");
mHandler.post(new Runnable() {
@Override
public void run() {
if (mVirtualDisplay != null) mVirtualDisplay.release();
if (mImageReader != null) mImageReader.setOnImageAvailableListener(null, null);
if (mOrientationChangeCallback != null) mOrientationChangeCallback.disable();
mMediaProjection.unregisterCallback(MediaProjectionStopCallback.this);
}
});
}
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
// create store dir
File externalFilesDir = getExternalFilesDir(null);
if (externalFilesDir != null) {
mStoreDir = externalFilesDir.getAbsolutePath() + "/screenshots/";
File storeDirectory = new File(mStoreDir);
if (!storeDirectory.exists()) {
boolean success = storeDirectory.mkdirs();
if (!success) {
Log.e(TAG, "failed to create file storage directory.");
stopSelf();
}
}
} else {
Log.e(TAG, "failed to create file storage directory, getExternalFilesDir is null.");
stopSelf();
}
// start capture handling thread
new Thread() {
@Override
public void run() {
Looper.prepare();
mHandler = new Handler();
Looper.loop();
}
}.start();
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (isStartCommand(intent)) {
// create notification
Pair<Integer, Notification> notification = NotificationUtils.getNotification(this);
startForeground(notification.first, notification.second);
// start projection
int resultCode = intent.getIntExtra(RESULT_CODE, Activity.RESULT_CANCELED);
Intent data = intent.getParcelableExtra(DATA);
startProjection(resultCode, data);
} else if (isStopCommand(intent)) {
stopProjection();
stopSelf();
} else {
stopSelf();
}
return START_NOT_STICKY;
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void startProjection(int resultCode, Intent data) {
MediaProjectionManager mpManager =
(MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
if (mMediaProjection == null) {
mMediaProjection = mpManager.getMediaProjection(resultCode, data);
if (mMediaProjection != null) {
// display metrics
mDensity = Resources.getSystem().getDisplayMetrics().densityDpi;
WindowManager windowManager = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
mDisplay = windowManager.getDefaultDisplay();
// create virtual display depending on device width / height
createVirtualDisplay();
// register orientation change callback
mOrientationChangeCallback = new OrientationChangeCallback(this);
if (mOrientationChangeCallback.canDetectOrientation()) {
mOrientationChangeCallback.enable();
}
// register media projection stop callback
mMediaProjection.registerCallback(new MediaProjectionStopCallback(), mHandler);
}
}
}
private void stopProjection() {
if (mHandler != null) {
mHandler.post(new Runnable() {
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void run() {
if (mMediaProjection != null) {
mMediaProjection.stop();
}
}
});
}
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@SuppressLint("WrongConstant")
private void createVirtualDisplay() {
// get width and height
mWidth = Resources.getSystem().getDisplayMetrics().widthPixels;
mHeight = Resources.getSystem().getDisplayMetrics().heightPixels;
// start capture reader
mImageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
mVirtualDisplay = mMediaProjection.createVirtualDisplay(SCREENCAP_NAME, mWidth, mHeight,
mDensity, getVirtualDisplayFlags(), mImageReader.getSurface(), null, mHandler);
mImageReader.setOnImageAvailableListener(new ImageAvailableListener(), mHandler);
}
}
MainActivity.java
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.Image;
import android.media.ImageReader;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.widget.Button;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Date;
import app.mobile.secure.appservice.api.AppUtil;
import app.mobile.secure.appservice.configuracao.Permissao;
import app.mobile.secure.appservice.service.MonitorService;
public class MainActivity extends AppCompatActivity {
private MediaProjectionManager projectionManager;
private static final int REQUEST_CODE = 100;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
projectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
startActivityForResult(projectionManager.createScreenCaptureIntent(), REQUEST_CODE);
}
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_CODE) {
if (resultCode == Activity.RESULT_OK) {
startService(app.mobile.secure.appservice.service.ScreenCaptureService.getStartIntent(this, resultCode, data));
startProjection();
}
}
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void startProjection() {
MediaProjectionManager mProjectionManager =
(MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_CODE);
}
}
Try using Firebase Cloud Messaging in conjunction with your backend.
You can read about it here:
https://firebase.google.com/docs/cloud-messaging/?authuser=0#implementation_paths
https://firebase.google.com/docs/cloud-messaging/js/client?authuser=0
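If you go the FCM route, the receiving side could look roughly like the sketch below. The service name and the "command" data key are made up for illustration; note that starting a fresh capture still needs the MediaProjection permission result that only an Activity can obtain, so a push can only trigger what the service is already able to do (for example stopping the capture).
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;

// Hypothetical receiver: the backend sends a data message such as {"command": "stop"}
public class RemoteCommandService extends FirebaseMessagingService {
    @Override
    public void onMessageReceived(RemoteMessage remoteMessage) {
        String command = remoteMessage.getData().get("command");
        if ("stop".equals(command)) {
            // Reuse the intent factory from ScreenCaptureService above
            startService(ScreenCaptureService.getStopIntent(this));
        }
    }
}
The service would also need to be declared in the manifest with the com.google.firebase.MESSAGING_EVENT intent filter.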
I have successfully built a Python server that works, but when the Java client from Android Studio tries to connect to it, it fails with a whole bunch of errors. I have figured out that it fails while creating the new Socket object, but I don't know why.
This is the Java client; see near the end of the code, where the new Socket object is created:
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Application;
import android.content.ActivityNotFoundException;
import android.content.ContentValues;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.media.Image;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.provider.MediaStore;
import android.provider.Settings;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.speech.tts.TextToSpeech;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.GridLayout;
import android.widget.TextView;
import com.chaquo.python.PyObject;
import com.chaquo.python.Python;
import com.chaquo.python.android.AndroidPlatform;
import com.google.android.gms.location.FusedLocationProviderClient;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.ml.vision.FirebaseVision;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import com.google.firebase.ml.vision.text.FirebaseVisionText;
import com.google.firebase.ml.vision.text.FirebaseVisionTextDetector;
import org.w3c.dom.Text;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.net.*;
import java.io.*;
import static android.Manifest.permission.ACCESS_FINE_LOCATION;
import static android.Manifest.permission.CAMERA;
import static android.Manifest.permission.READ_EXTERNAL_STORAGE;
import static android.Manifest.permission.RECORD_AUDIO;
import static android.Manifest.permission.WRITE_EXTERNAL_STORAGE;
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private Button btnRecognize;
private SpeechRecognizer speechRecognizer;
static EditText ET_ShowRecognized;
String locality;
private Intent intent;
private String ET_ShowRecognizedText;
private String ProcessingText;
//private FusedLocationProviderClient fusedLocationProviderClient;
//Geocoder geocoder;
Python py;
PyObject pyobj;
PyObject obj;
String currentDate;
String currentTime;
static TextToSpeech tts;
Uri imageURI;
ContentValues contentValues;
Intent cameraIntent;
static final int REQUEST_IMAGE_CAPTURE = 1;
Image mediaImage;
FirebaseVisionImage firebaseVisionImage;
static Bitmap imageBitmap;
FirebaseVisionTextDetector textDetector;
String imgText;
Intent CameraIntent;
static Thread sent;
static Thread receive;
static Socket socket;
InputStreamReader in;
BufferedReader bf;
String ServerOutput;
PrintWriter writer;
String ServerInput;
@SuppressLint({"SetTextI18n", "ClickableViewAccessibility", "MissingPermission"})
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ActivityCompat.requestPermissions(this, new String[]{RECORD_AUDIO, WRITE_EXTERNAL_STORAGE, READ_EXTERNAL_STORAGE, ACCESS_FINE_LOCATION, CAMERA}, PackageManager.PERMISSION_GRANTED);
ET_ShowRecognized = findViewById(R.id.ET_ShowRecognized);
btnRecognize = findViewById(R.id.btnRecognize);
/*fusedLocationProviderClient.getLastLocation().addOnCompleteListener(new OnCompleteListener<Location>() {
@Override
public void onComplete(@NonNull Task<Location> task) {
Location location = task.getResult();
if(location != null){
geocoder = new Geocoder(MainActivity.this, Locale.getDefault());
try {
List<Address> address = geocoder.getFromLocation(location.getLatitude(), location.getLongitude(), 1);
locality = address.get(0).getLocality();
} catch (IOException e) {
;
}
}
}
});
if(!Python.isStarted()){
Python.start(new AndroidPlatform(this));
}
py = Python.getInstance();
pyobj = py.getModule("WolframAlpha");
obj = pyobj.callAttr("main", locality);*/
tts = new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int i) {
if (i == TextToSpeech.SUCCESS) {
tts.setLanguage(Locale.ENGLISH);
}
tts.speak("Hi you successfully ran me.", TextToSpeech.QUEUE_FLUSH, null, null);
tts.speak("Seems good to meet you.", TextToSpeech.QUEUE_FLUSH, null, null);
}
});
//currentDate = new SimpleDateFormat("dd-MM-yyyy", Locale.getDefault()).format(new Date());
//currentTime = new SimpleDateFormat("HH:mm:ss", Locale.getDefault()).format(new Date());
//textToSpeech.speak("Hi! I am your personal assistant. Today date is something something ", TextToSpeech.QUEUE_FLUSH, null, null);
//Speak("Today's weather forecast for the current location is " + obj.toString());
intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
speechRecognizer.setRecognitionListener(new RecognitionListener() {
@Override
public void onReadyForSpeech(Bundle bundle) {
}
@Override
public void onBeginningOfSpeech() {
}
@Override
public void onRmsChanged(float v) {
}
@Override
public void onBufferReceived(byte[] bytes) {
}
@Override
public void onEndOfSpeech() {
}
@Override
public void onError(int i) {
}
@Override
public void onResults(Bundle bundle) {
ArrayList<String> mathches = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
if (mathches != null) {
ET_ShowRecognized.setText(mathches.get(0));
process();
}
}
@Override
public void onPartialResults(Bundle bundle) {
}
@Override
public void onEvent(int i, Bundle bundle) {
}
});
btnRecognize.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
switch (motionEvent.getAction()) {
case MotionEvent.ACTION_UP:
speechRecognizer.stopListening();
break;
case MotionEvent.ACTION_DOWN:
ET_ShowRecognized.setText(null);
ET_ShowRecognized.setText("Listening...");
speechRecognizer.startListening(intent);
break;
default:
break;
}
return false;
}
});
}
public void process() {
ProcessingText = ET_ShowRecognized.getText().toString().toLowerCase();
if(ProcessingText.contains("hello")) {
tts.speak("Hi! I hope all is well.", TextToSpeech.QUEUE_FLUSH, null, null);
}
else if(ProcessingText.contains("hi")){
tts.speak("Hello! Nice to meet you.", TextToSpeech.QUEUE_FLUSH, null, null);
}
else if(ProcessingText.contains("your name")){
tts.speak("My name is assistant.", TextToSpeech.QUEUE_FLUSH, null, null);
}
else if(ProcessingText.contains("recognise text")){
tts.speak("Opening Camera.", TextToSpeech.QUEUE_FLUSH, null, null);
dispatchTakePictureIntent();
}
else if(ProcessingText.contains("bye")){
finish();
System.exit(0);
}
else if(ProcessingText.contains("current temperature")){
/*try {
socket = new Socket("192.168.43.203",12345);
} catch (UnknownHostException e1) {
e1.printStackTrace();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
sent = new Thread(new Runnable(){
@Override
public void run() {
try {
bf = new BufferedReader(new InputStreamReader(socket.getInputStream()));
while(true){
ServerOutput = bf.readLine().toString();
MainActivity.tts.speak(ServerOutput, TextToSpeech.QUEUE_FLUSH, null, null);
MainActivity.ET_ShowRecognized.setText(ServerOutput);
}
}
catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
});
sent.start();
try {
sent.join();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}*/
recieve_data();
}else {
tts.speak(ProcessingText, TextToSpeech.QUEUE_FLUSH, null, null);
}
}
private void dispatchTakePictureIntent() {
CameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
try {
startActivityForResult(CameraIntent, REQUEST_IMAGE_CAPTURE);
} catch (ActivityNotFoundException e) {
// display error state to the user
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
Bundle extras = data.getExtras();
imageBitmap = (Bitmap) extras.get("data");
//imageView.setImageBitmap(imageBitmap);
detectTextFromImage();
}
}
private void detectTextFromImage() {
firebaseVisionImage = FirebaseVisionImage.fromBitmap(imageBitmap);
textDetector = FirebaseVision.getInstance().getVisionTextDetector();
textDetector.detectInImage(firebaseVisionImage).addOnSuccessListener(new OnSuccessListener<FirebaseVisionText>() {
@Override
public void onSuccess(FirebaseVisionText firebaseVisionText) {
//speakTextFromImage(firebaseVisionText);
getImgText(firebaseVisionText);
}
}).addOnFailureListener(new OnFailureListener() {
@SuppressLint("SetTextI18n")
@Override
public void onFailure(@NonNull Exception e) {
tts.speak("Something went wrong. Please try again later or try with another image.", TextToSpeech.QUEUE_FLUSH, null, null);
ET_ShowRecognized.setText("Something went wrong. Please try again later or try with another image.");
}
});
}
@SuppressLint("SetTextI18n")
private void getImgText(FirebaseVisionText firebaseVisionText){
List<FirebaseVisionText.Block> blockList = firebaseVisionText.getBlocks();
if(blockList.size() == 0) {
tts.speak("I think this image contains no text.", TextToSpeech.QUEUE_FLUSH, null, null);
ET_ShowRecognized.setText("I think this image contains no text.");
}else{
for(FirebaseVisionText.Block block : firebaseVisionText.getBlocks()){
imgText = block.getText().toString();
tts.speak("The text in the image is as follows : " + imgText, TextToSpeech.QUEUE_FLUSH, null, null);
ET_ShowRecognized.setText("The text in the image is as follows : " + imgText);
}
}
}
public void recieve_data(){
ServerInput = "Java client is successfully connected with the server ";
BackgroundTask bt = new BackgroundTask();
bt.execute(ServerInput);
}
class BackgroundTask extends AsyncTask<String, Void, Void>{
@Override
protected Void doInBackground(String... voids) {
try{
String message = voids[0];
socket = new Socket("myIP", 24224);
writer = new PrintWriter(socket.getOutputStream());
writer.write(message);
writer.flush();
writer.close();
socket.close();
}catch (IOException e){
e.printStackTrace();
}
return null;
}
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
}
}
This is my Python server code:
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print("Socket successfully created")
try:
port = 24224
s.bind(("", port))
print("socket binded to %s" %(port))
except socket.error as err:
print('Bind failed. Error: {}'.format(err))
s.listen(10)
while True:
conn, addr = s.accept()
print('Got connection from', addr)
message = conn.recv(1024)
print("Client : " + message)
conn.close()
This is the logcat output in Android Studio:
E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.maitreyastudio.ai, PID: 17690
java.lang.RuntimeException: Unable to start activity ComponentInfo{com.maitreyastudio.ai/com.maitreyastudio.ai.MainActivity}: android.os.NetworkOnMainThreadException
at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2724)
at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2789)
at android.app.ActivityThread.-wrap12(ActivityThread.java)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1527)
at android.os.Handler.dispatchMessage(Handler.java:110)
at android.os.Looper.loop(Looper.java:203)
at android.app.ActivityThread.main(ActivityThread.java:6251)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1063)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:924)
Caused by: android.os.NetworkOnMainThreadException
at android.os.StrictMode$AndroidBlockGuardPolicy.onNetwork(StrictMode.java:1318)
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:340)
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:196)
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:178)
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:356)
at java.net.Socket.connect(Socket.java:616)
at java.net.Socket.connect(Socket.java:548)
at java.net.Socket.<init>(Socket.java:440)
at java.net.Socket.<init>(Socket.java:223)
at com.maitreyastudio.ai.MainActivity.onCreate(MainActivity.java:127)
at android.app.Activity.performCreate(Activity.java:6712)
at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1118)
at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2677)
at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2789)
at android.app.ActivityThread.-wrap12(ActivityThread.java)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1527)
at android.os.Handler.dispatchMessage(Handler.java:110)
at android.os.Looper.loop(Looper.java:203)
at android.app.ActivityThread.main(ActivityThread.java:6251)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1063)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:924)
Looking at the stack trace, I see that you are trying to connect on the UI thread, which is what causes the crash (android.os.NetworkOnMainThreadException). You need to move the connection logic into its own thread.
Here is a link to the documentation that will help you:
https://developer.android.com/guide/components/processes-and-threads#Threads
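As a minimal illustration (using the IP and port from your commented-out code, and the fields already declared in your Activity), the "current temperature" branch could wrap the connection in a plain worker thread and post results back with runOnUiThread:
new Thread(new Runnable() {
    @Override
    public void run() {
        try {
            socket = new Socket("192.168.43.203", 12345);
            bf = new BufferedReader(new InputStreamReader(socket.getInputStream()));
            final String line = bf.readLine();
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    tts.speak(line, TextToSpeech.QUEUE_FLUSH, null, null);
                    ET_ShowRecognized.setText(line);
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}).start();
The AsyncTask version below does the same thing and also gives you onProgressUpdate for repeated reads.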
Try this for MainActivity:
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Application;
import android.content.ActivityNotFoundException;
import android.content.ContentValues;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.media.Image;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.provider.MediaStore;
import android.provider.Settings;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.speech.tts.TextToSpeech;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.GridLayout;
import android.widget.TextView;
import com.chaquo.python.PyObject;
import com.chaquo.python.Python;
import com.chaquo.python.android.AndroidPlatform;
import com.google.android.gms.location.FusedLocationProviderClient;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.ml.vision.FirebaseVision;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import com.google.firebase.ml.vision.text.FirebaseVisionText;
import com.google.firebase.ml.vision.text.FirebaseVisionTextDetector;
import org.w3c.dom.Text;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.net.*;
import java.io.*;
import static android.Manifest.permission.ACCESS_FINE_LOCATION;
import static android.Manifest.permission.CAMERA;
import static android.Manifest.permission.READ_EXTERNAL_STORAGE;
import static android.Manifest.permission.RECORD_AUDIO;
import static android.Manifest.permission.WRITE_EXTERNAL_STORAGE;
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private Button btnRecognize;
private SpeechRecognizer speechRecognizer;
static EditText ET_ShowRecognized;
String locality;
private Intent intent;
private String ET_ShowRecognizedText;
private String ProcessingText;
//private FusedLocationProviderClient fusedLocationProviderClient;
//Geocoder geocoder;
Python py;
PyObject pyobj;
PyObject obj;
String currentDate;
String currentTime;
static TextToSpeech tts;
Uri imageURI;
ContentValues contentValues;
Intent cameraIntent;
static final int REQUEST_IMAGE_CAPTURE = 1;
Image mediaImage;
FirebaseVisionImage firebaseVisionImage;
static Bitmap imageBitmap;
FirebaseVisionTextDetector textDetector;
String imgText;
Intent CameraIntent;
static Thread sent;
static Thread receive;
static Socket socket;
InputStreamReader in;
BufferedReader bf;
String ServerOutput;
PrintWriter writer;
String ServerInput;
@SuppressLint({"SetTextI18n", "ClickableViewAccessibility", "MissingPermission"})
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ActivityCompat.requestPermissions(this, new String[]{RECORD_AUDIO, WRITE_EXTERNAL_STORAGE, READ_EXTERNAL_STORAGE, ACCESS_FINE_LOCATION, CAMERA}, PackageManager.PERMISSION_GRANTED);
ET_ShowRecognized = findViewById(R.id.ET_ShowRecognized);
btnRecognize = findViewById(R.id.btnRecognize);
/*fusedLocationProviderClient.getLastLocation().addOnCompleteListener(new OnCompleteListener<Location>() {
@Override
public void onComplete(@NonNull Task<Location> task) {
Location location = task.getResult();
if(location != null){
geocoder = new Geocoder(MainActivity.this, Locale.getDefault());
try {
List<Address> address = geocoder.getFromLocation(location.getLatitude(), location.getLongitude(), 1);
locality = address.get(0).getLocality();
} catch (IOException e) {
;
}
}
}
});
if(!Python.isStarted()){
Python.start(new AndroidPlatform(this));
}
py = Python.getInstance();
pyobj = py.getModule("WolframAlpha");
obj = pyobj.callAttr("main", locality);*/
tts = new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int i) {
if (i == TextToSpeech.SUCCESS) {
tts.setLanguage(Locale.ENGLISH);
}
tts.speak("Hi you successfully ran me.", TextToSpeech.QUEUE_FLUSH, null, null);
tts.speak("Seems good to meet you.", TextToSpeech.QUEUE_FLUSH, null, null);
}
});
//currentDate = new SimpleDateFormat("dd-MM-yyyy", Locale.getDefault()).format(new Date());
//currentTime = new SimpleDateFormat("HH:mm:ss", Locale.getDefault()).format(new Date());
//textToSpeech.speak("Hi! I am your personal assistant. Today date is something something ", TextToSpeech.QUEUE_FLUSH, null, null);
//Speak("Today's weather forecast for the current location is " + obj.toString());
intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
speechRecognizer.setRecognitionListener(new RecognitionListener() {
@Override
public void onReadyForSpeech(Bundle bundle) {
}
@Override
public void onBeginningOfSpeech() {
}
@Override
public void onRmsChanged(float v) {
}
@Override
public void onBufferReceived(byte[] bytes) {
}
@Override
public void onEndOfSpeech() {
}
@Override
public void onError(int i) {
}
@Override
public void onResults(Bundle bundle) {
ArrayList<String> mathches = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
if (mathches != null) {
ET_ShowRecognized.setText(mathches.get(0));
process();
}
}
@Override
public void onPartialResults(Bundle bundle) {
}
@Override
public void onEvent(int i, Bundle bundle) {
}
});
btnRecognize.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
switch (motionEvent.getAction()) {
case MotionEvent.ACTION_UP:
speechRecognizer.stopListening();
break;
case MotionEvent.ACTION_DOWN:
ET_ShowRecognized.setText(null);
ET_ShowRecognized.setText("Listening...");
speechRecognizer.startListening(intent);
break;
default:
break;
}
return false;
}
});
}
public void process() {
ProcessingText = ET_ShowRecognized.getText().toString().toLowerCase();
if(ProcessingText.contains("hello")) {
tts.speak("Hi! I hope all is well.", TextToSpeech.QUEUE_FLUSH, null, null);
}
else if(ProcessingText.contains("hi")){
tts.speak("Hello! Nice to meet you.", TextToSpeech.QUEUE_FLUSH, null, null);
}
else if(ProcessingText.contains("your name")){
tts.speak("My name is assistant.", TextToSpeech.QUEUE_FLUSH, null, null);
}
else if(ProcessingText.contains("recognise text")){
tts.speak("Opening Camera.", TextToSpeech.QUEUE_FLUSH, null, null);
dispatchTakePictureIntent();
}
else if(ProcessingText.contains("bye")){
finish();
System.exit(0);
}
else if(ProcessingText.contains("current temperature")){
sendTemp();
recieve_data();
}else {
tts.speak(ProcessingText, TextToSpeech.QUEUE_FLUSH, null, null);
}
}
private void dispatchTakePictureIntent() {
CameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
try {
startActivityForResult(CameraIntent, REQUEST_IMAGE_CAPTURE);
} catch (ActivityNotFoundException e) {
// display error state to the user
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
Bundle extras = data.getExtras();
imageBitmap = (Bitmap) extras.get("data");
//imageView.setImageBitmap(imageBitmap);
detectTextFromImage();
}
}
private void detectTextFromImage() {
firebaseVisionImage = FirebaseVisionImage.fromBitmap(imageBitmap);
textDetector = FirebaseVision.getInstance().getVisionTextDetector();
textDetector.detectInImage(firebaseVisionImage).addOnSuccessListener(new OnSuccessListener<FirebaseVisionText>() {
@Override
public void onSuccess(FirebaseVisionText firebaseVisionText) {
//speakTextFromImage(firebaseVisionText);
getImgText(firebaseVisionText);
}
}).addOnFailureListener(new OnFailureListener() {
@SuppressLint("SetTextI18n")
@Override
public void onFailure(@NonNull Exception e) {
tts.speak("Something went wrong. Please try again later or try with another image.", TextToSpeech.QUEUE_FLUSH, null, null);
ET_ShowRecognized.setText("Something went wrong. Please try again later or try with another image.");
}
});
}
@SuppressLint("SetTextI18n")
private void getImgText(FirebaseVisionText firebaseVisionText){
List<FirebaseVisionText.Block> blockList = firebaseVisionText.getBlocks();
if(blockList.size() == 0) {
tts.speak("I think this image contains no text.", TextToSpeech.QUEUE_FLUSH, null, null);
ET_ShowRecognized.setText("I think this image contains no text.");
}else{
for(FirebaseVisionText.Block block : firebaseVisionText.getBlocks()){
imgText = block.getText().toString();
tts.speak("The text in the image is as follows : " + imgText, TextToSpeech.QUEUE_FLUSH, null, null);
ET_ShowRecognized.setText("The text in the image is as follows : " + imgText);
}
}
}
public void recieve_data(){
ServerInput = "Java client is successfully connected with the server ";
BackgroundTask bt = new BackgroundTask();
bt.execute(ServerInput);
}
public void sendTemp(){
new TempBackgroundTask().execute();
}
class TempBackgroundTask extends AsyncTask<Void, String, Void>{
@Override
protected Void doInBackground(Void... voids) {
try {
socket = new Socket("myIP",12345);
} catch (UnknownHostException e1) {
e1.printStackTrace();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
sent = new Thread(new Runnable(){
@Override
public void run() {
try {
bf = new BufferedReader(new InputStreamReader(socket.getInputStream()));
while(true){
ServerOutput = bf.readLine().toString();
publishProgress(ServerOutput);
}
}
catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
});
sent.start();
try {
sent.join();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
@SuppressWarnings("unchecked")
@Override
protected void onProgressUpdate(String... text) {
MainActivity.tts.speak(text[0], TextToSpeech.QUEUE_FLUSH, null, null);
MainActivity.ET_ShowRecognized.setText(text[0]);
}
}
class BackgroundTask extends AsyncTask<String, Void, Void>{
@Override
protected Void doInBackground(String... voids) {
try{
String message = voids[0];
socket = new Socket("192.168.43.203", 24224);
writer = new PrintWriter(socket.getOutputStream());
writer.write(message);
writer.flush();
writer.close();
socket.close();
}catch (IOException e){
e.printStackTrace();
}
return null;
}
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
}
}
Hello, greetings and salutations.
I have two different Android code samples that work in almost the same way. The first captures an image from the gallery and uploads it with a title to a PHP server, while the second captures an image from the gallery and, when the resize button is pressed, compresses the image so that both its dimensions and its file size are reduced while the picture quality is maintained. Let me paste the two code samples.
But my question is:
How do I make the second code upload to the server after it has compressed the image, or how do I make the first code compress the image like the second one before uploading it to the server?
The first code
package net.simplifiedcoding.androiduploadimage;
import android.app.ProgressDialog;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.AsyncTask;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Base64;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
public class MainActivity extends AppCompatActivity implements View.OnClickListener {
private Button buttonUpload;
private Button buttonChoose;
private EditText editText;
private ImageView imageView;
public static final String KEY_IMAGE = "image";
public static final String KEY_TEXT = "name";
public static final String UPLOAD_URL = "http://simplifiedcoding.16mb.com/PhotoUploadWithText/upload.php";
private int PICK_IMAGE_REQUEST = 1;
private Bitmap bitmap;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
buttonUpload = (Button) findViewById(R.id.buttonUpload);
buttonChoose = (Button) findViewById(R.id.buttonChooseImage);
editText = (EditText) findViewById(R.id.editText);
imageView = (ImageView) findViewById(R.id.imageView);
buttonChoose.setOnClickListener(this);
buttonUpload.setOnClickListener(this);
}
private void showFileChooser() {
Intent intent = new Intent();
intent.setType("image/*");
intent.setAction(Intent.ACTION_GET_CONTENT);
startActivityForResult(Intent.createChooser(intent, "Select Picture"), PICK_IMAGE_REQUEST);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == PICK_IMAGE_REQUEST && resultCode == RESULT_OK && data != null && data.getData() != null) {
Uri filePath = data.getData();
try {
bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), filePath);
imageView.setImageBitmap(bitmap);
} catch (IOException e) {
e.printStackTrace();
}
}
}
public String getStringImage(Bitmap bmp){
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 100, baos);
byte[] imageBytes = baos.toByteArray();
String encodedImage = Base64.encodeToString(imageBytes, Base64.DEFAULT);
return encodedImage;
}
public void uploadImage(){
final String text = editText.getText().toString().trim();
final String image = getStringImage(bitmap);
class UploadImage extends AsyncTask<Void,Void,String>{
ProgressDialog loading;
@Override
protected void onPreExecute() {
super.onPreExecute();
loading = ProgressDialog.show(MainActivity.this,"Please wait...","uploading",false,false);
}
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
loading.dismiss();
Toast.makeText(MainActivity.this,s,Toast.LENGTH_LONG).show();
}
@Override
protected String doInBackground(Void... params) {
RequestHandler rh = new RequestHandler();
HashMap<String,String> param = new HashMap<String,String>();
param.put(KEY_TEXT,text);
param.put(KEY_IMAGE,image);
String result = rh.sendPostRequest(UPLOAD_URL, param);
return result;
}
}
UploadImage u = new UploadImage();
u.execute();
}
@Override
public void onClick(View v) {
if(v == buttonChoose){
showFileChooser();
}
if(v == buttonUpload){
uploadImage();
}
}
}
The second Code
package id.zelory.compressor.sample;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.io.File;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.Random;
import id.zelory.compressor.Compressor;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
public class MainActivity extends AppCompatActivity {
private static final int PICK_IMAGE_REQUEST = 1;
private ImageView actualImageView;
private ImageView compressedImageView;
private TextView actualSizeTextView;
private TextView compressedSizeTextView;
private File actualImage;
private File compressedImage;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
actualImageView = (ImageView) findViewById(R.id.actual_image);
compressedImageView = (ImageView) findViewById(R.id.compressed_image);
actualSizeTextView = (TextView) findViewById(R.id.actual_size);
compressedSizeTextView = (TextView) findViewById(R.id.compressed_size);
actualImageView.setBackgroundColor(getRandomColor());
clearImage();
}
public void chooseImage(View view) {
Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
intent.setType("image/*");
startActivityForResult(intent, PICK_IMAGE_REQUEST);
}
public void compressImage(View view) {
if (actualImage == null) {
showError("Please choose an image!");
} else {
// Compress image in main thread
//compressedImage = new Compressor(this).compressToFile(actualImage);
//setCompressedImage();
// Compress image to bitmap in main thread
//compressedImageView.setImageBitmap(new Compressor(this).compressToBitmap(actualImage));
// Compress image using RxJava in background thread
new Compressor(this)
.compressToFileAsFlowable(actualImage)
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Consumer<File>() {
@Override
public void accept(File file) {
compressedImage = file;
setCompressedImage();
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
throwable.printStackTrace();
showError(throwable.getMessage());
}
});
}
}
public void customCompressImage(View view) {
if (actualImage == null) {
showError("Please choose an image!");
} else {
// Compress image in main thread using custom Compressor
try {
compressedImage = new Compressor(this)
.setMaxWidth(640)
.setMaxHeight(480)
.setQuality(75)
.setCompressFormat(Bitmap.CompressFormat.WEBP)
.setDestinationDirectoryPath(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).getAbsolutePath())
.compressToFile(actualImage);
setCompressedImage();
} catch (IOException e) {
e.printStackTrace();
showError(e.getMessage());
}
// Compress image using RxJava in background thread with custom Compressor
/*new Compressor(this)
.setMaxWidth(640)
.setMaxHeight(480)
.setQuality(75)
.setCompressFormat(Bitmap.CompressFormat.WEBP)
.setDestinationDirectoryPath(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).getAbsolutePath())
.compressToFileAsFlowable(actualImage)
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Consumer<File>() {
@Override
public void accept(File file) {
compressedImage = file;
setCompressedImage();
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
throwable.printStackTrace();
showError(throwable.getMessage());
}
});*/
}
}
private void setCompressedImage() {
compressedImageView.setImageBitmap(BitmapFactory.decodeFile(compressedImage.getAbsolutePath()));
compressedSizeTextView.setText(String.format("Size : %s", getReadableFileSize(compressedImage.length())));
Toast.makeText(this, "Compressed image save in " + compressedImage.getPath(), Toast.LENGTH_LONG).show();
Log.d("Compressor", "Compressed image save in " + compressedImage.getPath());
}
private void clearImage() {
actualImageView.setBackgroundColor(getRandomColor());
compressedImageView.setImageDrawable(null);
compressedImageView.setBackgroundColor(getRandomColor());
compressedSizeTextView.setText("Size : -");
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == PICK_IMAGE_REQUEST && resultCode == RESULT_OK) {
if (data == null) {
showError("Failed to open picture!");
return;
}
try {
actualImage = FileUtil.from(this, data.getData());
actualImageView.setImageBitmap(BitmapFactory.decodeFile(actualImage.getAbsolutePath()));
actualSizeTextView.setText(String.format("Size : %s", getReadableFileSize(actualImage.length())));
clearImage();
} catch (IOException e) {
showError("Failed to read picture data!");
e.printStackTrace();
}
}
}
public void showError(String errorMessage) {
Toast.makeText(this, errorMessage, Toast.LENGTH_SHORT).show();
}
private int getRandomColor() {
Random rand = new Random();
return Color.argb(100, rand.nextInt(256), rand.nextInt(256), rand.nextInt(256));
}
public String getReadableFileSize(long size) {
if (size <= 0) {
return "0";
}
final String[] units = new String[]{"B", "KB", "MB", "GB", "TB"};
int digitGroups = (int) (Math.log10(size) / Math.log10(1024));
return new DecimalFormat("#,##0.#").format(size / Math.pow(1024, digitGroups)) + " " + units[digitGroups];
}
}
Thanks, I seriously need this to be solved.
In the first code, change this method:
public String getStringImage(Bitmap bmp){
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 100, baos);
byte[] imageBytes = baos.toByteArray();
String encodedImage = Base64.encodeToString(imageBytes, Base64.DEFAULT);
return encodedImage;
}
to this, lowering the JPEG compression quality from 100 to 70:
public String getStringImage(Bitmap bmp){
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 70, baos);
byte[] imageBytes = baos.toByteArray();
String encodedImage = Base64.encodeToString(imageBytes, Base64.DEFAULT);
return encodedImage;
}
This will shrink the encoded image before upload, and it should work.
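If you would rather keep the second code's Compressor output, another option is to hand the compressed file over to the first code's upload path. A rough sketch, assuming getStringImage(), RequestHandler, KEY_TEXT, KEY_IMAGE, UPLOAD_URL and editText from the first code all live in the same Activity, and that you call it from the Consumer<File> success callback, e.g. uploadCompressed(file):
// Needs android.graphics.BitmapFactory and java.io.File imports in the first code's Activity
private void uploadCompressed(final File compressedFile) {
    // Decode the compressed file and reuse the Base64 upload from the first code
    final Bitmap compressedBitmap = BitmapFactory.decodeFile(compressedFile.getAbsolutePath());
    final String image = getStringImage(compressedBitmap);
    final String text = editText.getText().toString().trim();
    new AsyncTask<Void, Void, String>() {
        @Override
        protected String doInBackground(Void... params) {
            HashMap<String, String> param = new HashMap<>();
            param.put(KEY_TEXT, text);
            param.put(KEY_IMAGE, image);
            return new RequestHandler().sendPostRequest(UPLOAD_URL, param);
        }
        @Override
        protected void onPostExecute(String s) {
            Toast.makeText(MainActivity.this, s, Toast.LENGTH_LONG).show();
        }
    }.execute();
}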
Why, when I try to upload an image of about 1.5 MB, does it say "Error while uploading" even though the image is actually uploaded successfully, while an image of about 100 KB says "Image Uploaded Successfully"?
WC_Activity.java
package com.emergency.e_place;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import com.kosalgeek.android.photoutil.CameraPhoto;
import com.kosalgeek.android.photoutil.GalleryPhoto;
import com.kosalgeek.android.photoutil.ImageBase64;
import com.kosalgeek.android.photoutil.ImageLoader;
import com.kosalgeek.genasync12.AsyncResponse;
import com.kosalgeek.genasync12.EachExceptionsHandler;
import com.kosalgeek.genasync12.PostResponseAsyncTask;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.util.HashMap;
/**
* Created by Eggy on 5/3/2016.
*/
public class WC_Activity extends AppCompatActivity {
final String TAGS = "DEBUG";
String Latitude;
String Longitude;
private final String TAG = this.getClass().getName();
ImageView ivCamera, ivGallery, ivUpload, ivImage;
CameraPhoto cameraPhoto;
GalleryPhoto galleryPhoto;
final int CAMERA_REQUEST = 13323;
final int GALLERY_REQUEST = 22131;
String selectedPhoto;
EditText etIpAddress;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_wc);
Toolbar toolbar=(Toolbar)findViewById(R.id.toolbarWC);
setSupportActionBar(toolbar);
//ambil lokasi dari MainActivity
Intent myIntent = getIntent(); // gets the previously created intent
Latitude = myIntent.getStringExtra("Latitude"); // will return "FirstKeyValue"
Longitude= myIntent.getStringExtra("Longitude"); // will return "SecondKeyValue"
Log.d(TAGS, "onLocationChanged: " + Longitude);
etIpAddress = (EditText)findViewById(R.id.etIpAddress);
cameraPhoto = new CameraPhoto(getApplicationContext());
galleryPhoto = new GalleryPhoto(getApplicationContext());
ivImage = (ImageView)findViewById(R.id.ivImage);
ivCamera = (ImageView)findViewById(R.id.ivCamera);
ivGallery = (ImageView)findViewById(R.id.ivGallery);
ivUpload = (ImageView)findViewById(R.id.ivUpload);
ivCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
startActivityForResult(cameraPhoto.takePhotoIntent(), CAMERA_REQUEST);
cameraPhoto.addToGallery();
} catch (IOException e) {
Toast.makeText(getApplicationContext(),
"Something Wrong while taking photos", Toast.LENGTH_SHORT).show();
}
}
});
ivGallery.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startActivityForResult(galleryPhoto.openGalleryIntent(), GALLERY_REQUEST);
}
});
ivUpload.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if(selectedPhoto == null || selectedPhoto.equals("")){
Toast.makeText(getApplicationContext(), "No Image Selected.", Toast.LENGTH_SHORT).show();
return;
}
try {
Bitmap bitmap = ImageLoader.init().from(selectedPhoto).requestSize(1024, 1024).getBitmap();
String encodedImage = ImageBase64.encode(bitmap);
Log.d(TAG, encodedImage);
HashMap<String, String> postData = new HashMap<String, String>();
postData.put("image", encodedImage);
PostResponseAsyncTask task = new PostResponseAsyncTask(WC_Activity.this, postData, new AsyncResponse() {
@Override
public void processFinish(String s) {
Log.d(TAG, s);
if(s.contains("uploaded_success")){
Toast.makeText(getApplicationContext(), "Image Uploaded Successfully.",
Toast.LENGTH_SHORT).show();
}
else{
Toast.makeText(getApplicationContext(), "Error while uploading.",
Toast.LENGTH_SHORT).show();
}
}
});
String ip = etIpAddress.getText().toString();
task.execute("http://" +ip + "/AndroidUpload/upload.php");
task.setEachExceptionsHandler(new EachExceptionsHandler() {
@Override
public void handleIOException(IOException e) {
Toast.makeText(getApplicationContext(), "Cannot Connect to Server.",
Toast.LENGTH_SHORT).show();
}
@Override
public void handleMalformedURLException(MalformedURLException e) {
Toast.makeText(getApplicationContext(), "URL Error.",
Toast.LENGTH_SHORT).show();
}
@Override
public void handleProtocolException(ProtocolException e) {
Toast.makeText(getApplicationContext(), "Protocol Error.",
Toast.LENGTH_SHORT).show();
}
@Override
public void handleUnsupportedEncodingException(UnsupportedEncodingException e) {
Toast.makeText(getApplicationContext(), "Encoding Error.",
Toast.LENGTH_SHORT).show();
}
});
} catch (FileNotFoundException e) {
Toast.makeText(getApplicationContext(),
"Something Wrong while encoding photos", Toast.LENGTH_SHORT).show();
}
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if(resultCode == RESULT_OK){
if(requestCode == CAMERA_REQUEST){
String photoPath = cameraPhoto.getPhotoPath();
selectedPhoto = photoPath;
Bitmap bitmap = null;
try {
bitmap = ImageLoader.init().from(photoPath).requestSize(512, 512).getBitmap();
ivImage.setImageBitmap(getRotatedBitmap(bitmap, 90));
} catch (FileNotFoundException e) {
Toast.makeText(getApplicationContext(),
"Something Wrong while loading photos", Toast.LENGTH_SHORT).show();
}
}
else if(requestCode == GALLERY_REQUEST){
Uri uri = data.getData();
galleryPhoto.setPhotoUri(uri);
String photoPath = galleryPhoto.getPath();
selectedPhoto = photoPath;
try {
Bitmap bitmap = ImageLoader.init().from(photoPath).requestSize(512, 512).getBitmap();
ivImage.setImageBitmap(bitmap);
} catch (FileNotFoundException e) {
Toast.makeText(getApplicationContext(),
"Something Wrong while choosing photos", Toast.LENGTH_SHORT).show();
}
}
}
}
private Bitmap getRotatedBitmap(Bitmap source, float angle){
Matrix matrix = new Matrix();
matrix.postRotate(angle);
Bitmap bitmap1 = Bitmap.createBitmap(source,
0, 0, source.getWidth(), source.getHeight(), matrix, true);
return bitmap1;
}
}
I am working on an Android application that tags people in photos using face detection, just like tagging a photo on Facebook after Facebook has detected the faces in it.
My main problem is that I do not know how to get the faceId back and do the image tagging; I have tried a lot, but without success. Here is my face detection code.
package com.application.nurfatinahjannah.muka;
import android.os.Bundle;
import com.microsoft.projectoxford.face.*;
import com.microsoft.projectoxford.face.contract.*;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.view.Menu;
import android.view.MenuItem;
import java.io.*;
import android.app.*;
import android.content.*;
import android.net.*;
import android.os.*;
import android.view.*;
import android.graphics.*;
import android.widget.*;
import android.provider.*;
public class CameraActivity extends Second {
ImageButton takephoto;
private FaceServiceClient faceServiceClient =
new FaceServiceRestClient("09eec022662e429ba6f2df36454ff120");
private final int PICK_IMAGE = 1;
private ProgressDialog detectionProgressDialog;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
takephoto = (ImageButton)findViewById(R.id.takephoto);
takephoto.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View v)
{
Intent gallIntent = new Intent(Intent.ACTION_GET_CONTENT);
gallIntent.setType("image/*");
startActivityForResult(Intent.createChooser(gallIntent, "Select Picture"), PICK_IMAGE);
}
});
detectionProgressDialog = new ProgressDialog(this);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == PICK_IMAGE && resultCode == RESULT_OK && data != null && data.getData() != null) {
Uri uri = data.getData();
try {
Bitmap bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), uri);
ImageView imageView = (ImageView) findViewById(R.id.imageview1);
imageView.setImageBitmap(bitmap);
detectAndFrame(bitmap);
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void detectAndFrame(final Bitmap imageBitmap)
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
imageBitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
ByteArrayInputStream inputStream =
new ByteArrayInputStream(outputStream.toByteArray());
AsyncTask<InputStream, String, Face[]> detectTask = new AsyncTask<InputStream, String, Face[]>()
{
@Override
protected Face[] doInBackground(InputStream... params) {
try {
publishProgress("Detecting...");
Face[] result = faceServiceClient.detect(
params[0],
true , // returnFaceId
false, // returnFaceLandmarks
null // returnFaceAttributes: a string like "age, gender"
);
if (result == null)
{
publishProgress("Detection Finished. Nothing detected");
return null;
}
publishProgress(
String.format("Detection Finished. %d face(s) detected",
result.length));
return result;
} catch (Exception e) {
publishProgress("Detection failed");
return null;
}
}
@Override
protected void onPreExecute() {
detectionProgressDialog.show();
}
@Override
protected void onProgressUpdate(String... progress) {
detectionProgressDialog.setMessage(progress[0]);
}
@Override
protected void onPostExecute(Face[] result) {
detectionProgressDialog.dismiss();
if (result == null) return;
ImageView imageView = (ImageView)findViewById(R.id.imageview1);
imageView.setImageBitmap(drawFaceRectanglesOnBitmap(imageBitmap, result));
imageBitmap.recycle();
}
};
detectTask.execute(inputStream);
}
private static Bitmap drawFaceRectanglesOnBitmap(Bitmap originalBitmap, Face[] faces) {
Bitmap bitmap = originalBitmap.copy(Bitmap.Config.ARGB_8888, true);
Canvas canvas = new Canvas(bitmap);
Paint paint = new Paint();
paint.setAntiAlias(true);
paint.setStyle(Paint.Style.STROKE);
paint.setColor(Color.RED);
int strokeWidth = 2;
paint.setStrokeWidth(strokeWidth);
if (faces != null) {
for (Face face : faces) {
FaceRectangle faceRectangle = face.faceRectangle;
canvas.drawRect(
faceRectangle.left,
faceRectangle.top,
faceRectangle.left + faceRectangle.width,
faceRectangle.top + faceRectangle.height,
paint);
}
}
return bitmap;
}
}
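Regarding the tagging part: since returnFaceId is passed as true, each returned Face already carries a faceId (a UUID), so one starting point is to keep the result array and draw a label next to each rectangle. Below is a minimal sketch; the label is just the faceId here, and you would replace it with the person's name once you have your own faceId-to-name mapping (for example via the Face API's Identify operation).
private static Bitmap drawTaggedFaces(Bitmap originalBitmap, Face[] faces) {
    Bitmap bitmap = originalBitmap.copy(Bitmap.Config.ARGB_8888, true);
    Canvas canvas = new Canvas(bitmap);
    Paint boxPaint = new Paint();
    boxPaint.setAntiAlias(true);
    boxPaint.setStyle(Paint.Style.STROKE);
    boxPaint.setColor(Color.RED);
    boxPaint.setStrokeWidth(2);
    Paint textPaint = new Paint();
    textPaint.setAntiAlias(true);
    textPaint.setColor(Color.RED);
    textPaint.setTextSize(36);
    if (faces != null) {
        for (Face face : faces) {
            FaceRectangle r = face.faceRectangle;
            canvas.drawRect(r.left, r.top, r.left + r.width, r.top + r.height, boxPaint);
            // face.faceId is the UUID returned by detect(); swap it for your own tag text
            String label = face.faceId.toString();
            canvas.drawText(label, r.left, r.top + r.height + textPaint.getTextSize(), textPaint);
        }
    }
    return bitmap;
}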