Crop Face Using Image Detection in Android - java

I am trying to detect faces and crop the face region as a rectangular image. I have done the face detection part, but I still can't find any help on how to crop the face. Please have a look at my code!
public class FaceDetect extends Activity {
private MyImageView mIV;
private Bitmap mFaceBitmap;
private int mFaceWidth = 200;
private int mFaceHeight = 200;
int cropXinit = 0;
int cropYint = 0;
int cropXend = 0;
int cropYend = 0;
Bitmap cropedBitmap;
Bitmap b;
private static final int MAX_FACES = 1;
private static String TAG = "FaceDetect";
private static boolean DEBUG = false;
protected static final int GUIUPDATE_SETFACE = 999;
protected Handler mHandler = new Handler() {
// @Override
public void handleMessage(Message msg) {
mIV.invalidate();
super.handleMessage(msg);
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mIV = new MyImageView(this);
setContentView(mIV, new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT));
// load the photo
b = ChooseActivity.bitmap;
mFaceBitmap = b.copy(Bitmap.Config.RGB_565, true);
b.recycle();
mFaceWidth = mFaceBitmap.getWidth();
mFaceHeight = mFaceBitmap.getHeight();
mIV.setImageBitmap(mFaceBitmap);
mIV.invalidate();
setFace();
}
public void setFace() {
FaceDetector fd;
FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
PointF eyescenter = new PointF();
float eyesdist = 0.0f;
int[] fpx = null;
int[] fpy = null;
int count = 0;
try {
fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
count = fd.findFaces(mFaceBitmap, faces);
} catch (Exception e) {
Log.e(TAG, "setFace(): " + e.toString());
return;
}
// check if we detect any faces
if (count > 0) {
fpx = new int[count * 2];
fpy = new int[count * 2];
for (int i = 0; i < count; i++) {
try {
faces[i].getMidPoint(eyescenter);
eyesdist = faces[i].eyesDistance();
// set up left eye location
fpx[2 * i] = (int) (eyescenter.x - eyesdist / 2);
fpy[2 * i] = (int) eyescenter.y;
// set up right eye location
fpx[2 * i + 1] = (int) (eyescenter.x + eyesdist / 2);
fpy[2 * i + 1] = (int) eyescenter.y;
if (DEBUG)
Log.e(TAG,
"setFace(): face "
+ i
+ ": confidence = "
+ faces[i].confidence()
+ ", eyes distance = "
+ faces[i].eyesDistance()
+ ", pose = ("
+ faces[i]
.pose(FaceDetector.Face.EULER_X)
+ ","
+ faces[i]
.pose(FaceDetector.Face.EULER_Y)
+ ","
+ faces[i]
.pose(FaceDetector.Face.EULER_Z)
+ ")" + ", eyes midpoint = ("
+ eyescenter.x + "," + eyescenter.y
+ ")");
} catch (Exception e) {
Log.e(TAG, "setFace(): face " + i + ": " + e.toString());
}
}
}
mIV.setDisplayPoints(fpx, fpy, count * 2, 1);
// if(eyescenter.x -eyesdist >= 0)
// {
// cropXinit = (int) (eyescenter.x -eyesdist) ;
// }
// else
// {
// cropXinit = 0;
// }
// if(eyescenter.x +eyesdist <= mFaceWidth)
// {
// cropXend = (int) (eyescenter.x +eyesdist) ;
// }
// else
// {
// cropXend = mFaceWidth;
// }
// if(eyescenter.y +eyesdist*2 <= mFaceHeight)
// {
// cropYend = (int) (eyescenter.y +eyesdist*2) ;
// }
// else
// {
// cropYend = mFaceHeight;
// }
// if(eyescenter.y -eyesdist >= 0)
// {
// cropYint = (int) (eyescenter.y -eyesdist) ;
// }
// else
// {
// cropYint = 0;
// }
// mIV.setImageBitmap(Bitmap.createBitmap(mFaceBitmap,cropXinit,cropYint,cropXend,cropYend));
}
}

createBitmap(Bitmap source, int x, int y, int width, int height) receives a start X and start Y, and a width and height value, not an end X and end Y. If you change your commented-out code to this it should work:
mIV.setImageBitmap(Bitmap.createBitmap(mFaceBitmap, cropXinit, cropYint, cropXend - cropXinit, cropYend - cropYint));
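For completeness, here is a minimal sketch of the commented-out clamping logic combined with the corrected call, assuming it runs at the end of setFace() so that eyescenter and eyesdist are still in scope:
// Sketch: clamp the crop rectangle to the bitmap bounds, then pass
// start coordinates plus extents (not end coordinates) to createBitmap().
cropXinit = Math.max((int) (eyescenter.x - eyesdist), 0);
cropXend = Math.min((int) (eyescenter.x + eyesdist), mFaceWidth);
cropYint = Math.max((int) (eyescenter.y - eyesdist), 0);
cropYend = Math.min((int) (eyescenter.y + eyesdist * 2), mFaceHeight);
mIV.setImageBitmap(Bitmap.createBitmap(mFaceBitmap,
cropXinit, // start X
cropYint, // start Y
cropXend - cropXinit, // width, not end X
cropYend - cropYint)); // height, not end Y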

Related

Barchart doesn't show my values when filtered

I don't know if I'm using the correct way of filtering a barchart, or even whether my code is properly written.
My problem: when I open my dashboard I have three charts. One of the piecharts can filter the barchart, so that the barchart shows only the data for the selected slice, and otherwise shows everything.
The filtering itself works, but I can't get the bar labels to appear. They only show when a bar holds a single entry: for example, with 1 yellow and 15 reds there is only one bar to show and only the yellow label appears, but with 2 yellows and 15 reds none of the labels show at all.
It should be an easy fix; I just can't see where the error is.
ArrayList<Integer> colors_to_add_type = new ArrayList<>();
ArrayList<Integer> colors_to_add_type_MINOR = new ArrayList<>();
ArrayList<Integer> colors_to_add_type_MEDIUM = new ArrayList<>();
ArrayList<Integer> colors_to_add_type_MAJOR = new ArrayList<>();
ArrayList<Integer> colors_to_add_type_OTHER = new ArrayList<>();
Iterator<String> defects_colors_keys;
defects_colors_keys = colors_type.keys();
while (defects_colors_keys.hasNext()){
String key = defects_colors_keys.next();
Log.e("COLOR_KEY", "onResponse: " + key );
int to_add = Color.parseColor(colors_type.getString(key));
int to_add_MINOR = Color.parseColor("#00FF00");
int to_add_MEDIUM = Color.parseColor("#FFFF00");
int to_add_MAJOR = Color.parseColor("#FF0000");
int to_add_OTHER = Color.parseColor("#800080");
Log.e("COLOR_NAME", "onResponse: " + colors_type.getString(key));
colors_to_add_type.add(to_add);
colors_to_add_type_MAJOR.add(to_add_MAJOR);
colors_to_add_type_MEDIUM.add(to_add_MEDIUM);
colors_to_add_type_MINOR.add(to_add_MINOR);
colors_to_add_type_OTHER.add(to_add_OTHER);
}
List<LegendEntry> legends_type = new ArrayList<>();
List<LegendEntry> legends_type_MINOR = new ArrayList<>();
List<LegendEntry> legends_type_MEDIUM = new ArrayList<>();
List<LegendEntry> legends_type_MAJOR = new ArrayList<>();
List<LegendEntry> legends_type_OTHER = new ArrayList<>();
type_of_defects = by_type.keys();
int c = 0;
if(!map_defects_by_type_count.isEmpty()){
map_defects_by_type_count.clear();
}
Log.e("DEFECTS", defects_list.toString());
Log.e("DEFECTS",String.valueOf(defects_list.size()));
while(type_of_defects.hasNext()) {
String key = type_of_defects.next();
Log.e("DEFECTS_type", key.toString());
map_defects_by_type_count.put(key, by_type.getInt(key));
map_get_defect_type.put(c, key);
LegendEntry legendEntry = new LegendEntry();
legendEntry.label = key;
legendEntry.formColor = colors_to_add_type.get(c);
legends_type.add(legendEntry);
arr_by_type.add(new BarEntry((float)c + 1, (int)by_type.getInt(key), (String)key));
c++;
for (int i = 0; i < defects_list.size(); i++){
if (defects_list.get(i).getStringProperty("defect").equals(key)){
if (defects_list.get(i).getStringProperty("criticality").equals("MAJOR")){
arr_by_type_MAJOR.add(new BarEntry((float)c +1, (int)by_type.getInt(key), (String)key));
legends_type_MAJOR.add(legendEntry);
break;
}
if (defects_list.get(i).getStringProperty("criticality").equals("MEDIUM")){
arr_by_type_MEDIUM.add(new BarEntry((float)c +1 , (int)by_type.getInt(key), (String)key));
legends_type_MEDIUM.add(legendEntry);
break;
}
if (defects_list.get(i).getStringProperty("criticality").equals("MINOR")){
arr_by_type_MINOR.add(new BarEntry((float)c +1, (int)by_type.getInt(key), (String)key));
legends_type_MINOR.add(legendEntry);
break;
}
if (defects_list.get(i).getStringProperty("criticality").equals("OTHER")){
arr_by_type_OTHER.add(new BarEntry((float)c +1, (int)by_type.getInt(key), (String)key));
legends_type_OTHER.add(legendEntry);
break;
}
}
}
}
TOTAL_DEFECTS = TOTAL_BY_CRIT;
if(TOTAL_BY_CRIT != 0){
if(MAJOR != 0){
major_percent = (float) MAJOR / TOTAL_BY_CRIT;
}
if(MEDIUM != 0){
medium_percent = (float) MEDIUM / TOTAL_BY_CRIT;
}
if(MINOR != 0){
minor_percent = (float) MINOR / TOTAL_BY_CRIT;
}
if(OTHER != 0){
other_percent = (float) OTHER / TOTAL_BY_CRIT;
}
}
TOTAL_SOURCE = TOTAL_H_M;
if(TOTAL_H_M != 0){
if(HUMAN != 0){
human_percent = (float) HUMAN / TOTAL_H_M;
}
if(MACHINE != 0){
machine_percent = (float) MACHINE / TOTAL_H_M;
}
}
arr_by_defect.add(new PieEntry(MAJOR, "MAJOR"));
arr_by_defect.add(new PieEntry(MEDIUM, "MEDIUM"));
arr_by_defect.add(new PieEntry(MINOR, "MINOR"));
arr_by_defect.add(new PieEntry(OTHER, "OTHER"));
arr_by_source.add(new PieEntry(HUMAN, "HUMAN"));
arr_by_source.add(new PieEntry(MACHINE, "MACHINE"));
dataSet_by_defect = new PieDataSet(arr_by_defect, "");
dataSet_by_source = new PieDataSet(arr_by_source, "");
dataSet_by_type = new BarDataSet(arr_by_type, "");
dataSet_by_type_MAJOR = new BarDataSet(arr_by_type_MAJOR, "");
dataSet_by_type_MEDIUM = new BarDataSet(arr_by_type_MEDIUM, "");
dataSet_by_type_MINOR = new BarDataSet(arr_by_type_MINOR, "");
dataSet_by_type_OTHER = new BarDataSet(arr_by_type_OTHER, "");
//GET THE COLORS
String colorMajor = colors.getString("MAJOR");
String colorMedium = colors.getString("MEDIUM");
String colorMinor = colors.getString("MINOR");
String colorOther = colors.getString("OTHER");
String colorHuman = human_machine_colors.getString("human");
String colorMachine = human_machine_colors.getString("machine");
final int[] sliceColors = {Color.parseColor(colorMajor), Color.parseColor(colorMedium), Color.parseColor(colorMinor), Color.parseColor(colorOther)};
ArrayList<Integer> colorsSet = new ArrayList<>();
for (int color : sliceColors){
colorsSet.add(color);
}
Log.e("COLORS_TO ADD", "MAJOR" + colorMajor);
Log.e("COLORS_TO ADD", "MEDIUM" + colorMedium);
Log.e("COLORS_TO ADD", "MINOR" + colorMinor);
Log.e("COLORS_TO ADD", "OTHER" + colorOther);
dataSet_by_defect.setColors(colorsSet);
final int[] sliceColors_hm = {Color.parseColor(colorHuman), Color.parseColor(colorMachine)};
ArrayList<Integer> colorsSet_hm = new ArrayList<>();
for (int color : sliceColors_hm){
colorsSet_hm.add(color);
}
dataSet_by_source.setColors(colorsSet_hm);
dataSet_by_type.setColors(colors_to_add_type);
dataSet_by_type_MINOR.setColors(colors_to_add_type_MINOR);
dataSet_by_type_MEDIUM.setColors(colors_to_add_type_MEDIUM);
dataSet_by_type_MAJOR.setColors(colors_to_add_type_MAJOR);
dataSet_by_type_OTHER.setColors(colors_to_add_type_OTHER);
dataSet_by_type.setAxisDependency(YAxis.AxisDependency.RIGHT);
dataSet_by_type_MINOR.setAxisDependency(YAxis.AxisDependency.RIGHT);
dataSet_by_type_MEDIUM.setAxisDependency(YAxis.AxisDependency.RIGHT);
dataSet_by_type_MAJOR.setAxisDependency(YAxis.AxisDependency.RIGHT);
dataSet_by_type_OTHER.setAxisDependency(YAxis.AxisDependency.RIGHT);
final PieData data_defect = new PieData(dataSet_by_defect);
final PieData data_source = new PieData(dataSet_by_source);
final BarData data_type = new BarData(dataSet_by_type);
final BarData data_type_MINOR = new BarData(dataSet_by_type_MINOR);
final BarData data_type_MEDIUM = new BarData(dataSet_by_type_MEDIUM);
final BarData data_type_MAJOR = new BarData(dataSet_by_type_MAJOR);
final BarData data_type_OTHER = new BarData(dataSet_by_type_OTHER);
data_type.setDrawValues(true);
data_type_MAJOR.setDrawValues(true);
data_type_MEDIUM.setDrawValues(true);
data_type_MINOR.setDrawValues(true);
data_type_OTHER.setDrawValues(true);
data_defect.setDrawValues(false);
data_source.setDrawValues(false);
//data_type.setBarWidth(.75f);
piechart_defects.setDrawHoleEnabled(true);
piechart_defects.setHoleColor(Color.TRANSPARENT);
piechart_defects.setDrawEntryLabels(false);
piechart_defects.setDrawMarkers(false);
piechart_defects.setCenterText(String.valueOf(TOTAL_DEFECTS) + " TOTAL");
piechart_defects.setCenterTextSize(10);
piechart_defects.getDescription().setEnabled(false);
Legend l_defect = piechart_defects.getLegend();
l_defect.setVerticalAlignment(Legend.LegendVerticalAlignment.TOP);
l_defect.setHorizontalAlignment(Legend.LegendHorizontalAlignment.RIGHT);
l_defect.setOrientation(Legend.LegendOrientation.VERTICAL);
l_defect.setDrawInside(false);
l_defect.setEnabled(true);
l_defect.setFormSize(4f);
l_defect.setTextColor(Color.WHITE);
l_defect.setTextSize(.5f);
piechart_source.setDrawHoleEnabled(true);
piechart_source.setDrawEntryLabels(false);
piechart_source.setHoleColor(Color.TRANSPARENT);
piechart_source.setDrawMarkers(false);
piechart_source.setCenterText(String.valueOf(TOTAL_SOURCE) + " TOTAL");
piechart_source.setCenterTextSize(10);
piechart_source.getDescription().setEnabled(false);
piechart_source.setTransparentCircleRadius(10f);
Legend l_source = piechart_source.getLegend();
l_source.setVerticalAlignment(Legend.LegendVerticalAlignment.TOP);
l_source.setHorizontalAlignment(Legend.LegendHorizontalAlignment.RIGHT);
l_source.setOrientation(Legend.LegendOrientation.VERTICAL);
l_source.setDrawInside(false);
l_source.setEnabled(true);
l_source.setFormSize(4f);
l_source.setTextColor(Color.WHITE);
l_source.setTextSize(.5f);
dataSet_by_type.setValueTextColor(Color.WHITE);
dataSet_by_type_MAJOR.setValueTextColor(Color.WHITE);
dataSet_by_type_MEDIUM.setValueTextColor(Color.WHITE);
dataSet_by_type_MINOR.setValueTextColor(Color.WHITE);
dataSet_by_type_OTHER.setValueTextColor(Color.WHITE);
dataSet_by_type.setValueTextSize(7f);
dataSet_by_type_MINOR.setValueTextSize(7f);
dataSet_by_type_MEDIUM.setValueTextSize(7f);
dataSet_by_type_MAJOR.setValueTextSize(7f);
dataSet_by_type_OTHER.setValueTextSize(7f);
data_type.setBarWidth(.5f);
data_type.setValueFormatter(new ValueFormatter() {
@Override
public String getBarLabel(BarEntry barEntry) {
int barEntryY = (int) barEntry.getY();
return (String)barEntry.getData() + " " + "(" + String.valueOf(barEntryY) + ")";
}
});
Legend l_type_MAJOR = barchart_type.getLegend();
l_type_MAJOR.setCustom(legends_type_MAJOR);
l_type_MAJOR.setWordWrapEnabled(true);
l_type_MAJOR.setDrawInside(true);
data_type_MAJOR.setValueFormatter(new ValueFormatter() {
@Override
public String getBarLabel(BarEntry barEntry) {
int barEntryY = (int) barEntry.getY();
return (String)barEntry.getData() + " " + "(" + String.valueOf(barEntryY) + ")";
}
});
data_type_MAJOR.setBarWidth(.5f);
Legend l_type_OTHER = barchart_type.getLegend();
l_type_OTHER.setCustom(legends_type_OTHER);
l_type_OTHER.setWordWrapEnabled(true);
l_type_OTHER.setDrawInside(true);
data_type_OTHER.setValueFormatter(new ValueFormatter() {
@Override
public String getBarLabel(BarEntry barEntry) {
int barEntryY = (int) barEntry.getY();
return (String)barEntry.getData() + " " + "(" + String.valueOf(barEntryY) + ")";
}
});
data_type_OTHER.setBarWidth(.5f);
Legend l_type_MINOR = barchart_type.getLegend();
l_type_MINOR.setCustom(legends_type_MINOR);
l_type_MINOR.setWordWrapEnabled(true);
l_type_MINOR.setDrawInside(true);
data_type_MINOR.setValueFormatter(new ValueFormatter() {
@Override
public String getBarLabel(BarEntry barEntry) {
int barEntryY = (int) barEntry.getY();
return (String)barEntry.getData() + " " + "(" + String.valueOf(barEntryY) + ")";
}
});
Legend l_type_MEDIUM = barchart_type.getLegend();
l_type_MEDIUM.setCustom(legends_type_MEDIUM);
l_type_MEDIUM.setWordWrapEnabled(true);
l_type_MEDIUM.setDrawInside(true);
data_type_MEDIUM.setValueFormatter(new ValueFormatter() {
@Override
public String getBarLabel(BarEntry barEntry) {
int barEntryY = (int) barEntry.getY();
return (String)barEntry.getData() + " " + "(" + String.valueOf(barEntryY) + ")";
}
});
barchart_type.setRenderer(new MyRenderer(barchart_type, barchart_type.getAnimator(), barchart_type.getViewPortHandler()));
Legend l_type = barchart_type.getLegend();
l_type.setCustom(legends_type);
l_type.setWordWrapEnabled(true);
l_type.setDrawInside(true);
piechart_defects.setData(data_defect);
piechart_defects.invalidate();
piechart_source.setData(data_source);
piechart_source.invalidate();
barchart_type.getAxisLeft().setEnabled(false);
barchart_type.setScaleEnabled(false);
barchart_type.getAxisRight().setEnabled(false);
barchart_type.getAxisRight().setDrawGridLines(false);
barchart_type.getXAxis().setDrawLabels(false);
barchart_type.getXAxis().setDrawGridLines(false);
barchart_type.getDescription().setEnabled(false);
barchart_type.getLegend().setEnabled(false);
barchart_type.setBorderColor(Color.BLACK);
barchart_type.setDrawBorders(false);
barchart_type.setData(data_type);
barchart_type.setMinimumHeight(arr_by_type.size()*250);
barchart_type.invalidate();
Log.e("INFO BAR", data_type.toString());
piechart_defects.setOnChartValueSelectedListener(new OnChartValueSelectedListener() {
@Override
public void onValueSelected(Entry e, Highlight h) {
String value = String.valueOf((int)h.getY());
String toShow = "";
if(h.getX() == 0){
toShow = " MAJOR \n" + value;
chartSelectedDefect = "MAJOR";
barchart_type.setData(data_type_MAJOR);
}else if(h.getX() == 1){
toShow = " MEDIUM \n" + value;
chartSelectedDefect = "MEDIUM";
barchart_type.setData(data_type_MEDIUM);
barchart_type.invalidate();
}else if(h.getX() == 2){
toShow = " MINOR \n" + value ;
chartSelectedDefect = "MINOR";
barchart_type.setData(data_type_MINOR);
}else if(h.getX() == 3){
toShow = " OTHER \n" + value;
chartSelectedDefect = "OTHER";
barchart_type.setData(data_type_OTHER);
How it appears when I show all:

Trouble receiving images from an 8 modem server

I have implemented this Java app communicating with a remote 8 modem server, and when I request an image from a security camera (error free or not) the outcome is a 1-byte image (basically a small white square). Can you help me find the error?
Here is my code:
import java.io.*;
import java.util.Scanner;
import ithakimodem.Modem;
public class g {
private static Scanner scanner = new Scanner(System.in);
private static String EchoCode = "E3369";
private static String noImageErrorscode = "M2269";
private static String imageErrorsCode = "G6637";
private static String GPS_Code = "P7302";
private static String ACK_Code = "Q2591";
private static String NACK_Code = "R4510";
public static void main(String[] args) throws IOException, InterruptedException {
int timeout = 2000;
int speed = 80000;
Modem modem = new Modem(speed);
modem.setTimeout(timeout);
modem.write("ATD2310ITHAKI\r".getBytes());
getConsole(modem);
modem.write(("test\r").getBytes());
getConsole(modem);
while (true) {
System.out.println("\nChoose one of the options below :");
System.out.print("Option 0: Exit\n" + "Option 1: Echo packages\n" + "Option 2: Image with no errors\n" + "Option 3: Image with errors\n" + "Option 4: Tracks of GPS\n"
+ "Option 5: ARQ\n" );
System.out.print("Insert option : ");
int option = scanner.nextInt();
System.out.println("");
switch (option) {
case 0: {
// Exit
System.out.println("\nExiting...Goodbye stranger");
modem.close();
scanner.close();
return;
}
case 1: {
// Echo
getEcho(modem, EchoCode);
break;
}
case 2: {
// Image with no errors
getImage(modem, noImageErrorscode, "no_error_image.jpg");
break;
}
case 3: {
// Image with errors
getImage(modem, imageErrorsCode, "image_with_errors.jpg");
break;
}
case 4: {
// GPS
getGPS(modem, GPS_Code);
break;
}
case 5: {
// ARQ
getARQ(modem, ACK_Code, NACK_Code);
break;
}
default:
System.out.println("Try again please\n");
}
}
}
public static String getConsole(Modem modem) {
int l;
StringBuilder stringBuilder= new StringBuilder();
String string = null;
while (true) {
try {
l = modem.read();
if (l == -1) {
break;
}
System.out.print((char) l);
stringBuilder.append((char) l);
string = stringBuilder.toString();
} catch (Exception exc) {
break;
}
}
System.out.println("");
return string;
}
private static void getImage(Modem modem, String password, String fileName) throws IOException {
int l;
boolean flag;
FileOutputStream writer = new FileOutputStream(fileName, false);
flag = modem.write((password + "\r").getBytes());
System.out.println("\nReceiving " + fileName + "...");
if (!flag) {
System.out.println("Code error or end of connection");
writer.close();
return;
}
while (true) {
try {
l = modem.read();
writer.write((char) l);
writer.flush();
if (l == -1) {
System.out.println("END");
break;
}
}
catch (Exception exc) {
break;
}
}
writer.close();
}
private static void getGPS(Modem modem, String password) throws IOException {
int rows = 99;
String Rcode = "";
Rcode = password + "R=10200" + rows;
System.out.println("Executing R parameter = " + Rcode + " (XPPPPLL)");
modem.write((Rcode + "\r").getBytes());
String stringConsole = getConsole(modem);
if (stringConsole == null) {
System.out.println("Code error or end of connection");
return;
}
String[] stringRows = stringConsole.split("\r\n");
if (stringRows[0].equals("n.a")) {
System.out.println("Error : Packages not received");
return;
}
System.out.println("**TRACES**\n");
float l1 = 0, l2 = 0;
int difference = 8;
difference *= 100 / 60;
int traceNumber = 7;
String[] traces = new String[traceNumber + 1];
int tracesCounter = 0, flag = 0;
for (int i = 0; i < rows; i++) {
if (stringRows[i].startsWith("$GPGGA")) {
if (flag == 0) {
String str = stringRows[i].split(",")[1];
l1 = Integer.valueOf(str.substring(0, 6)) * 100 / 60;
flag = 1;
}
String str = stringRows[i].split(",")[1];
l2 = Integer.valueOf(str.substring(0, 6)) * 100 / 60;
if (Math.abs(l2 - l1) >= difference) {
traces[tracesCounter] = stringRows[i];
if (tracesCounter == traceNumber)
break;
tracesCounter++;
l1 = l2;
}
}
}
for (int i = 0; i < traceNumber; i++) {
System.out.println(traces[i]);
}
String w = "", T_cd_fnl = password + "T=";
int p = 1;
System.out.println();
for (int i = 0; i < traceNumber; i++) {
String[] strSplit = traces[i].split(",");
System.out.print("T parameter = ");
String str1 = strSplit[4].substring(1, 3);
String str2 = strSplit[4].substring(3, 5);
String str3= String.valueOf(Integer.parseInt(strSplit[4].substring(6, 10)) * 60 / 100).substring(0, 2);
String str4= strSplit[2].substring(0, 2);
String str5= strSplit[2].substring(2, 4);
String str6= String.valueOf(Integer.parseInt(strSplit[2].substring(5, 9)) * 60 / 100).substring(0, 2);
w = str1 + str2 + str3 + str4 + str5 + str6 + "T";
p = p + 5;
System.out.println(w);
T_cd_fnl = T_cd_fnl + w + "=";
}
T_cd_fnl = T_cd_fnl.substring(0, T_cd_fnl.length() - 2);
System.out.println("\nSending code: " + T_cd_fnl);
getImage(modem, T_cd_fnl, "traces GPS.jpg");
}
private static void getEcho(Modem modem, String strl) throws InterruptedException, IOException {
FileOutputStream echo_time_writer = new FileOutputStream("echoTimes.txt", false);
FileOutputStream echo_counter_writer = new FileOutputStream("echoCounter.txt", false);
int h;
int runtime = 5, packetCounter = 0;
String str = "";
System.out.println("\nRuntime (mins) = " + runtime + "\n");
long start_time = System.currentTimeMillis();
long stop_time = start_time + 60 * 1000 * runtime;
long send_time = 0, receiveTime = 0;
String time = "", ClockTime = "";
echo_time_writer.write("Clock Time\tSystem Time\r\n".getBytes());
while (System.currentTimeMillis() <= stop_time) {
packetCounter++;
send_time = System.currentTimeMillis();
modem.write((strl + "\r").getBytes());
while (true) {
try {
h = modem.read();
System.out.print((char) h);
str += (char) h;
if ( h== -1) {
System.out.println("\nCode error or end of connection");
return;
}
if (str.endsWith("PSTOP")) {
receiveTime = System.currentTimeMillis();
ClockTime = str.substring(18, 26) + "\t";
time = String.valueOf((receiveTime - send_time) + "\r\n");
echo_time_writer.write(ClockTime.getBytes());
echo_time_writer.write(time.getBytes());
echo_time_writer.flush();
str = "";
break;
}
} catch (Exception e) {
break;
}
}
System.out.println("");
}
echo_counter_writer.write(("\r\nRuntime: " + String.valueOf(runtime)).getBytes());
echo_counter_writer.write(("\r\nPackets Received: " + String.valueOf(packetCounter)).getBytes());
echo_counter_writer.close();
echo_time_writer.close();
}
private static void getARQ(Modem modem, String strl, String nack_code) throws IOException, InterruptedException {
FileOutputStream arq_time_writer = new FileOutputStream("ARQtimes.txt", false);
FileOutputStream arq_counter_writer = new FileOutputStream("ARQcounter.txt", false);
int runtime = 5;
int xor = 1;
int f = 1;
int m;
int packageNumber = 0;
int iterationNumber = 0;
int[] nack_times_counter = new int[15];
int nack_to_package = 0;
String time = "", clock_time = "", s = "";
long start_time = System.currentTimeMillis();
long stop_time = start_time + 60 * 1000 * runtime;
long send_time = 0, receive_time = 0;
System.out.printf("Runtime (mins) = " + runtime + "\n");
arq_time_writer.write("Clock Time\tSystem Time\tPacket Resends\r\n".getBytes());
while (System.currentTimeMillis() <= stop_time) {
if (xor == f) {
packageNumber++;
nack_times_counter[nack_to_package]++;
nack_to_package = 0;
send_time = System.currentTimeMillis();
modem.write((strl + "\r").getBytes());
} else {
iterationNumber++;
nack_to_package++;
modem.write((nack_code + "\r").getBytes());
}
while (true) {
try {
m = modem.read();
System.out.print((char) m);
s += (char) m;
if (m == -1) {
System.out.println("\nCode error or end of connection");
return;
}
if (s.endsWith("PSTOP")) {
receive_time = System.currentTimeMillis();
break;
}
} catch (Exception e) {
break;
}
}
System.out.println("");
String[] string = s.split("<");
string = string[1].split(">");
f = Integer.parseInt(string[1].substring(1, 4));
xor = string[0].charAt(0) ^ string[0].charAt(1);
for (int i = 2; i < 16; i++) {
xor = xor ^ string[0].charAt(i);
}
if (xor == f) {
System.out.println("Packet ok");
receive_time = System.currentTimeMillis();
time = String.valueOf((receive_time - send_time) + "\t");
clock_time = s.substring(18, 26) + "\t";
arq_time_writer.write(clock_time.getBytes());
arq_time_writer.write(time.getBytes());
arq_time_writer.write((String.valueOf(nack_to_package) + "\r\n").getBytes());
arq_time_writer.flush();
} else {
xor = 0;
}
s = "";
}
arq_counter_writer.write(("\r\nRuntime: " + String.valueOf(runtime)).getBytes());
arq_counter_writer.write("\r\nPackets Received (ACK): ".getBytes());
arq_counter_writer.write(String.valueOf(packageNumber).getBytes());
arq_counter_writer.write("\r\nPackets Resent (NACK): ".getBytes());
arq_counter_writer.write(String.valueOf(iterationNumber).getBytes());
arq_counter_writer.write("\r\nNACK Time Details".getBytes());
for (int i = 0; i < nack_times_counter.length; i++) {
arq_counter_writer.write(("\r\n" + i + ":\t" + nack_times_counter[i]).getBytes());
}
arq_time_writer.close();
arq_counter_writer.close();
System.out.println("Packets Received: " + packageNumber);
System.out.println("Packets Resent: " + iterationNumber);
System.out.println("\n\nFile arqTimes.txt is created.");
System.out.println("File arqCounter.txt is created.");
}
}
I know the problem is most probably in the getImage() function but I haven't figured it out yet.
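One detail that may explain the 1-byte file: in getImage() the loop writes the value returned by modem.read() before testing it for -1, so the timeout/end-of-stream sentinel ends up in the file, and a read that fails immediately leaves exactly one junk byte behind. A minimal sketch of the guarded loop, assuming read() returns -1 on timeout just as getConsole() already relies on:
while (true) {
try {
l = modem.read();
if (l == -1) { // check the sentinel BEFORE writing
System.out.println("END");
break;
}
writer.write(l); // write the raw byte; no char cast needed
} catch (Exception exc) {
break;
}
}
writer.flush();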

Every time a new swing windows opens in code that is executed in a for loop

I have some code that runs in a for loop. Each iteration assigns new values to my multidimensional array, but each iteration also opens a new window without closing the old one.
How can I solve this? I want to close the old window, or better, have only one window open and refresh the values inside its table with the new values from the multidimensional array named data.
Right now more than 200 windows are open (a new window opens every second), after a minute I no longer see new values appearing, and the computer freezes.
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import javax.swing.JOptionPane;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.SwingUtilities;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import javax.swing.Timer;
public class UAppWin {
private static final int Status_COL = 1;
String[][] data = new String[3][3];
int dptr;
String[] cols = { "Feature", "Status", "Value" };
public UAppWin(String label, int nphases) {
System.out.println("UApp \"" + label + "\" (" + nphases + " phases)");
}
void newCycle(int phasenr) {
System.out.println("UApp =============================");
dptr = 0;
}
void addEntry(int index, double tim, String label, int status, double dval) {
System.out.println("Uapp [" + index + "] " + label + "(" + status + ") " + dval);
data[dptr][0] = label;
data[dptr][1] = "" + status;
data[dptr][2] = "" + dval;
dptr++;
}
void addMessage(String msg) {
System.out.println("Uapp alert: " + msg);
// red bar with a message
}
void deleteMessage() {
}
void endCycle() {
System.out.println("UApp =============================");
SwingUtilities.invokeLater(new Runnable() {
public void run() {
JOptionPane.showMessageDialog(null, new JScrollPane(getNewRenderedTable(getTable())));
}
});
}
private JTable getTable() {
DefaultTableModel model = new DefaultTableModel(data, cols);
return new JTable(model) {
@Override
public Dimension getPreferredScrollableViewportSize() {
return new Dimension(300, 150);
}
};
}
private static JTable getNewRenderedTable(final JTable table) {
table.setDefaultRenderer(Object.class, new DefaultTableCellRenderer() {
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
boolean hasFocus, int row, int col) {
super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, col);
String Status = (String) table.getModel().getValueAt(row, Status_COL);
if ("0".equals(Status)) {
setBackground(Color.GREEN);
setForeground(Color.BLACK);
} else if ("2".equals(Status)) {
setBackground(Color.RED);
setForeground(Color.BLACK);
} else {
setBackground(Color.YELLOW);
setForeground(Color.BLACK);
}
return this;
}
});
return table;
}
}
The second part of the code, edited:
public class HmOpIntf {
static final String DFLT_IP_ADDR = "127.0.0.1";
static final int DFLT_IP_PORT = 9502;
static final int DFLT_MB_UNIT = 1;
static final int DFLT_POLL_TM = 2;
public static ModbusClient connectPLC(String ipAddr, int port, int unitNr)
{
ModbusClient mc = null;
System.out.println("Connecting to " + ipAddr + " port " + port + " unit " + unitNr);
try {
mc = new ModbusClient(ipAddr, port);
mc.Connect();
mc.setUnitIdentifier((byte)unitNr);
mc.WriteSingleCoil(0, true);
} catch (Exception e) {
System.err.println("*** connectPLC: exception caught");
return null;
}
System.out.println("Connected!");
return mc;
}
public static void disconnectPLC(ModbusClient mc)
{
mc = null;
}
public static String MyConvertRegistersToString(int[] regs, int startIx, int len) {
char[] ca = new char[len];
for (int i = 0; i < len; i++) {
ca[i] = (char) regs[startIx + i];
}
return new String(ca);
}
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
ModbusClient mc = null;
String ipAddr = DFLT_IP_ADDR;
int ipPort = DFLT_IP_PORT;
int mbUnit = DFLT_MB_UNIT;
int pollTime = DFLT_POLL_TM;
int tlBase = 2000; /* Offset in PLC's holding registry */
int tlBlocksz = 84;
String[] tlLabel = {"T4 mould", "T injection valve"}; /* Default */
int trafLightNum = tlLabel.length;
String[] colors = { "green", "yellow", "red" };
int status;
// Notifications.infoBox("Hello world!", "Welcome message");
if (args != null && args.length > 0) {
if (args.length < 4) {
System.err.println("*** Error (" + args.length +"): arguments are: ip-addr port unit polltime label-1 ...");
System.exit(1);
}
ipAddr = args[0];
ipPort = Integer.parseInt(args[1]);
mbUnit = Integer.parseInt(args[2]);
pollTime = Integer.parseInt(args[3]);
}
if (args.length > 4) {
trafLightNum = args.length - 4;
tlLabel = new String[trafLightNum];
for (int i = 0; i < trafLightNum; i++) {
tlLabel[i] = args[i + 4];
}
}
// Scope sc = new Scope();
// sc.runScope();
if ((mc = connectPLC(ipAddr, ipPort, mbUnit)) == null) {
System.out.println("*** Failed to connect to PLC");
System.exit(1);
}
TrafficLight tlLast = null;
int[] values = new int[tlBlocksz];
TrafficLight[] tl = new TrafficLight[trafLightNum];
Scope[] sc = new Scope[trafLightNum];
Notifications nots = new Notifications(trafLightNum);
int locX, locY;
for (int i = 0; i < tl.length; i++) {
tl[i] = new TrafficLight();
tl[i].setLbl(tlLabel[i]);
tl[i].setVisible(true);
if (tlLast != null) {
locX = tlLast.getLocation().x;
locY = tlLast.getLocation().y + tlLast.getHeight();
} else {
locX = tl[i].getLocation().x;
locY = tl[i].getLocation().y;
}
tl[i].setLocation(locX, locY);
sc[i] = new Scope(tlLabel[i], locX + tl[i].getWidth(), locY, 320, 290 /* tl[i].getHeight()-80 */ );
sc[i].setGrid(10, 5);
tlLast = tl[i];
}
UAppWin uw = new UAppWin("RTM Facility", 5);
int phase = 1;
// tl2.setVisible(true); tl2.setLocation(tl.getWidth(), 0);
try {
double t = 0.0;
int[] dreg = new int[2];
for (;;) {
uw.newCycle(phase);
for (int i = 0; i < tl.length; i++) {
values = mc.ReadHoldingRegisters(tlBase + i * tlBlocksz, values.length);
status = values[0];
if (status >= 0 && status < colors.length) {
// System.out.println(i + ": " + colors[status]);
if (status == 0) tl[i].greenOn();
else if (status == 1) tl[i].yellowOn();
else tl[i].redOn();
}
else
System.out.println("Status value " + i + " out of range: " + status);
dreg[0] = values[1]; dreg[1] = values[2];
double dval = (double) ModbusClient.ConvertRegistersToFloat(dreg);
sc[i].addValue(t, dval);
sc[i].drawSignal();
// w.addEntry(int i, float t, String label, int status (o = groen, 1 = yellow, 2 = red), float dval);
uw.addEntry(i, t, tlLabel[i], status, dval);
int msglen = values[3];
if (msglen > 0) {
String msg = MyConvertRegistersToString(values, 4, msglen);
System.out.println("DEBUG: received message for " + tlLabel[i] + ": " + msg);
nots.notify(i, msg);
uw.addMessage(msg);
}
else {
nots.notify(i, null);
uw.deleteMessage();
}
// System.out.println("Received for set " + i + ": status=" + status + " dval=" + dval + " msglen=" + msglen);
}
uw.endCycle();
t += 1.0;
Thread.sleep(pollTime * 500);
}
} catch (Exception e) {
System.out.println("*** Failed to communicate with PLC - exit");
System.exit(1);
}
try {
mc.Disconnect();
} catch (IOException e) {
System.out.println("*** Failed to disconnect from PLC");
}
}
}
The UAppWin seems to be tied to a window, so when you create it, also create a JFrame. Then nothing else needs to change except the run method and declaring the JFrame.
public class UAppWin {
private JFrame frame;
public UAppWin(String label, int nphases) {
//System.out.println("UApp \"" + label + "\" (" + nphases + " phases)");
JLabel title = new JLabel("UApp \"" + label + "\" (" + nphases + " phases)");
frame = new JFrame("title");
frame.add(title);
frame.setVisible(true);
}
Then, where you currently create the window:
SwingUtilities.invokeLater(new Runnable() {
public void run() {
//JOptionPane.showMessageDialog(null, new JScrollPane(getNewRenderedTable(getTable())));
frame.setContentPane( new JScrollPane( getNewRenderedTable( getTable() ) ) );
frame.setVisible(true);
}
});
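Put together, endCycle() would then look something like this (a sketch of the same idea; frame.pack() is an addition so the frame resizes to fit the new table):
// Sketch: reuse the single frame created in the constructor instead
// of opening a new dialog on every cycle.
void endCycle() {
System.out.println("UApp =============================");
SwingUtilities.invokeLater(new Runnable() {
public void run() {
frame.setContentPane(new JScrollPane(getNewRenderedTable(getTable())));
frame.pack();
frame.setVisible(true);
}
});
}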

The orientation of the rear camera is inverse portrait

I'm trying to switch the camera with a button, and I'm successful in that, but the problem is that the preview of the rear camera is in inverse portrait. I've tried setDisplayOrientation, but no changes; maybe I've put it on the wrong line. This is the code, and with the button I call the startCamera method:
public class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final String TAG = "MainRenderer";
public static final int DEVICE_ORIENTATION_PORTRAIT = 0;
public static final int DEVICE_ORIENTATION_INVERSE_PORTRAIT = 1;
public static final int DEVICE_ORIENTATION_LANDSCAPE = 2;
public static final int DEVICE_ORIENTATION_INVERSE_LANDSCAPE = 3;
private Camera.CameraInfo cameraInfo;
public volatile int deviceOrientation = DEVICE_ORIENTATION_PORTRAIT;
private FSDK.HTracker tracker;
private int[] textures;
private Camera camera;
private SurfaceTexture surfaceTexture;
private boolean updateSurfaceTexture = false;
private FSDK.FSDK_Features[] trackingFeatures;
private MR.MaskFeatures maskCoords;
private int[] isMaskTexture1Created = new int[]{0};
private int[] isMaskTexture2Created = new int[]{0};
private int width;
private int height;
private ByteBuffer pixelBuffer;
private FSDK.HImage cameraImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE cameraImageMode = new FSDK.FSDK_IMAGEMODE();
private FSDK.HImage snapshotImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE snapshotImageMode = new FSDK.FSDK_IMAGEMODE();
private MainView mainView;
private MainActivity mainActivity;
private volatile boolean isResizeCalled = false;
private volatile boolean isResized = false;
public long IDs[] = new long[MR.MAX_FACES];
public long face_count[] = new long[1];
private long frameCount = 0;
private long startTime = 0;
private AtomicBoolean isTakingSnapshot = new AtomicBoolean(false);
public static final int[][] MASKS = new int[][]{
{R.raw.lips_pink, R.drawable.lips_pink, R.drawable.lips_pink_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_purple, R.drawable.lips_purple, R.drawable.lips_purple_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_red, R.drawable.lips_red, R.drawable.lips_red_normal, MR.SHIFT_TYPE_NO},
};
private int mask = 0;
private int maskLoaded = 0;
private volatile boolean isMaskChanged = false;
private boolean inPreview = false;
public void changeMask(int i) {
mask += i;
isMaskChanged = true;
}
public MainRenderer(MainView view) {
tracker = Application.tracker;
mainView = view;
mainActivity = (MainActivity) mainView.getContext();
trackingFeatures = new FSDK.FSDK_Features[MR.MAX_FACES];
for (int i = 0; i < MR.MAX_FACES; ++i) {
trackingFeatures[i] = new FSDK.FSDK_Features();
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j] = new FSDK.TPoint();
}
}
maskCoords = new MR.MaskFeatures();
}
public void close() {
updateSurfaceTexture = false;
surfaceTexture.release();
camera.stopPreview();
camera.release();
camera = null;
deleteTex();
}
public void startCamera() {
if (inPreview) {
camera.stopPreview();
inPreview = false;
}
//NB: if you don't release the current camera before switching, your app will crash
camera.release();
//swap the id of the camera to be used
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_FRONT;
} else {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_BACK;
}
camera = Camera.open(cameraInfo.facing);
//Code snippet for this method from somewhere on Android developers, I forget where
//setCameraDisplayOrientation(mainActivity, cameraInfo.facing, camera);
try {
//this step is critical or the preview on the new camera will not know where to render to
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
camera.startPreview();
inPreview = true;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceCreated");
isResizeCalled = false;
isResized = false;
initTex();
loadMask(mask);
surfaceTexture = new SurfaceTexture(textures[0]);
surfaceTexture.setOnFrameAvailableListener(this);
// Find the ID of the camera
int cameraId = 0;
boolean frontCameraFound = false;
cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
}
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
frontCameraFound = true;
}
}
if (frontCameraFound) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
GLES11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); //background color
}
private byte[] readBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
int bufferSize = 16384;
byte[] buffer = new byte[bufferSize];
int len;
while ((len = inputStream.read(buffer)) != -1) {
byteBuffer.write(buffer, 0, len);
}
return byteBuffer.toByteArray();
}
// must be called from the thread with OpenGL context!
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public void loadMask(int maskNumber) {
GLES11.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
Log.d(TAG, "Loading mask...");
int[] mask = MASKS[maskNumber];
if (isMaskTexture1Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 1);
}
if (isMaskTexture2Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 2);
}
isMaskTexture1Created[0] = 0;
isMaskTexture2Created[0] = 0;
InputStream stream = mainView.getResources().openRawResource(mask[0]);
int res = MR.LoadMaskCoordsFromStream(stream, maskCoords);
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (res != FSDK.FSDKE_OK) {
Log.e(TAG, "Error loading mask coords from stream: " + res);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
return;
}
BitmapFactory.Options bitmapDecodingOptions = new BitmapFactory.Options();
bitmapDecodingOptions.inScaled = false; // to load original image without scaling
FSDK.HImage img1 = new FSDK.HImage();
if (mask[1] == -1) { // if no image
FSDK.CreateEmptyImage(img1);
} else {
stream = mainView.getResources().openRawResource(mask[1]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img1, data, data.length);
Log.d(TAG, "Load mask image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img1, w);
FSDK.GetImageHeight(img1, h);
Log.d(TAG, "Mask image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask image, using empty image");
FSDK.CreateEmptyImage(img1);
}
}
FSDK.HImage img2 = new FSDK.HImage();
if (mask[2] == -1) { // if no normal image
FSDK.CreateEmptyImage(img2);
} else {
stream = mainView.getResources().openRawResource(mask[2]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img2, data, data.length);
Log.d(TAG, "Load mask normal image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img2, w);
FSDK.GetImageHeight(img2, h);
Log.d(TAG, "Mask normal image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask normal image, using empty image");
FSDK.CreateEmptyImage(img2);
}
}
res = MR.LoadMask(img1, img2, textures[1], textures[2], isMaskTexture1Created, isMaskTexture2Created);
FSDK.FreeImage(img1);
FSDK.FreeImage(img2);
Log.d(TAG, "Mask loaded with result " + res + " texture1Created:" + isMaskTexture1Created[0] + " texture2Created:" + isMaskTexture2Created[0]);
Log.d(TAG, "Mask textures: " + textures[1] + " " + textures[2]);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
}
public void onDrawFrame(GL10 unused) { //call opengl functions only inside these functions!
GLES11.glClear(GLES11.GL_COLOR_BUFFER_BIT);
if (!isResized) {
return;
}
synchronized (this) {
if (updateSurfaceTexture) {
surfaceTexture.updateTexImage();
updateSurfaceTexture = false;
}
}
if (isMaskChanged) {
maskLoaded = mask;
loadMask(mask);
isMaskChanged = false;
}
int rotation = 1;
// First, drawing without mask to get image buffer
int res = MR.DrawGLScene(textures[0], 0, trackingFeatures, rotation, MR.SHIFT_TYPE_NO, textures[1], textures[2], maskCoords, 0, 0, width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the first MR.DrawGLScene call: " + res);
}
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
face_count[0] = 0;
processCameraImage();
// Second, drawing with mask atop of image
res = MR.DrawGLScene(textures[0], (int) face_count[0], trackingFeatures, rotation, MASKS[maskLoaded][3], textures[1], textures[2], maskCoords, isMaskTexture1Created[0], isMaskTexture2Created[0], width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the second MR.DrawGLScene call: " + res);
}
// Save snapshot if needed
if (isTakingSnapshot.compareAndSet(true, false)) {
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
snapshotImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
res = FSDK.LoadImageFromBuffer(snapshotImage, pixelBuffer.array(), width, height, width * 4, snapshotImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading snapshot image to FaceSDK: " + res);
} else {
FSDK.MirrorImage(snapshotImage, false);
String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath();
final String filename = galleryPath + "/MirrorRealityDemo" + System.currentTimeMillis() + ".png";
res = FSDK.SaveImageToFile(snapshotImage, filename);
Log.d(TAG, "saving snapshot to " + filename);
FSDK.FreeImage(snapshotImage);
if (FSDK.FSDKE_OK == res) {
mainActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(filename);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
mainActivity.sendBroadcast(mediaScanIntent);
Toast.makeText(mainActivity, "Saved successfully", Toast.LENGTH_SHORT).show();
}
});
}
}
}
// Show fps
++frameCount;
long timeCurrent = System.currentTimeMillis();
if (startTime == 0) startTime = timeCurrent;
long diff = timeCurrent - startTime;
if (diff >= 3000) {
final float fps = frameCount / (diff / 1000.0f);
frameCount = 0;
startTime = 0;
final TextView fpsTextView = mainActivity.fpsTextView();
mainActivity.fpsTextView().post(new Runnable() {
@Override
public void run() {
if (!mainActivity.isFinishing()) {
fpsTextView.setText(fps + " FPS");
}
}
});
}
}
private void processCameraImage() {
//clear previous features
for (int i = 0; i < MR.MAX_FACES; ++i) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = 0;
trackingFeatures[i].features[j].y = 0;
}
}
cameraImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
int res = FSDK.LoadImageFromBuffer(cameraImage, pixelBuffer.array(), width, height, width * 4, cameraImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading camera image to FaceSDK: " + res);
return;
}
FSDK.MirrorImage(cameraImage, false);
int[] widthByReference = new int[1];
int[] heightByReference = new int[1];
FSDK.GetImageWidth(cameraImage, widthByReference);
FSDK.GetImageHeight(cameraImage, heightByReference);
int width = widthByReference[0];
int height = heightByReference[0];
int rotation = 0;
if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_PORTRAIT) {
rotation = 2;
} else if (deviceOrientation == DEVICE_ORIENTATION_LANDSCAPE) {
rotation = 3;
} else if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_LANDSCAPE) {
rotation = 1;
}
if (rotation > 0) {
FSDK.HImage rotated = new FSDK.HImage();
FSDK.CreateEmptyImage(rotated);
FSDK.RotateImage90(cameraImage, rotation, rotated);
FSDK.FeedFrame(tracker, 0, rotated, face_count, IDs);
FSDK.FreeImage(rotated);
} else {
FSDK.FeedFrame(tracker, 0, cameraImage, face_count, IDs);
}
for (int i = 0; i < (int) face_count[0]; ++i) {
FSDK.GetTrackerFacialFeatures(tracker, 0, IDs[i], trackingFeatures[i]);
if (rotation > 0) {
if (rotation == 1) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = height - 1 - x;
}
} else if (rotation == 2) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].y = height - 1 - trackingFeatures[i].features[j].y;
}
} else {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = x;
}
}
}
}
FSDK.FreeImage(cameraImage);
}
public void onSurfaceChanged(GL10 unused, int width, int height) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceChanged");
if (!isResizeCalled) {
isResizeCalled = true;
mainView.resizeForPerformance(width, height);
return;
}
GLES11.glViewport(0, 0, width, height);
Camera.Parameters param = camera.getParameters();
List<Camera.Size> psize = param.getSupportedPreviewSizes();
if (psize.size() > 0) {
int i = 0;
int optDistance = Integer.MAX_VALUE;
Log.d(TAG, "Choosing preview resolution closer to " + width + " x " + height);
double neededScale = height / (double) width;
for (int j = 0; j < psize.size(); ++j) {
double scale = psize.get(j).width / (double) psize.get(j).height;
int distance = (int) (10000 * Math.abs(scale - neededScale));
Log.d(TAG, "Choosing preview resolution, probing " + psize.get(j).width + " x " + psize.get(j).height + " distance: " + distance);
if (distance < optDistance) {
i = j;
optDistance = distance;
} else if (distance == optDistance) {
// try to avoid too low resolution
if ((psize.get(i).width < 300 || psize.get(i).height < 300)
&& psize.get(j).width > psize.get(i).width && psize.get(j).height > psize.get(i).height) {
i = j;
}
}
}
Log.d(TAG, "Using optimal preview size: " + psize.get(i).width + " x " + psize.get(i).height);
param.setPreviewSize(psize.get(i).width, psize.get(i).height);
// adjusting viewport to camera aspect ratio
int viewportHeight = (int) (width * (psize.get(i).width * 1.0f / psize.get(i).height));
GLES11.glViewport(0, 0, width, viewportHeight);
this.width = width;
this.height = viewportHeight;
pixelBuffer = ByteBuffer.allocateDirect(this.width * this.height * 4).order(ByteOrder.nativeOrder());
}
param.set("orientation", "landscape");
camera.setParameters(param);
camera.startPreview();
inPreview = true;
isResized = true;
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
private void initTex() {
textures = new int[3];
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
GLES11.glEnable(GL10.GL_TEXTURE_2D);
GLES11.glGenTextures(3, textures, 0);
GLES11.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_S, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_T, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MIN_FILTER, GLES11.GL_NEAREST);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MAG_FILTER, GLES11.GL_NEAREST);
}
private void deleteTex() {
GLES11.glDeleteTextures(3, textures, 0);
}
public synchronized void onFrameAvailable(SurfaceTexture st) {
updateSurfaceTexture = true;
mainView.requestRender();
}
public synchronized void snapshot() {
isTakingSnapshot.set(true);
}
}
Any type of help is appreciated... thank you so much.
Here is the preview that I see:

I have tried to make sense of this, but there is little information on which API level you're trying to target; that may be the problem, or the problem may be the test device.
But let's rewrite the camera display code:
Method 1:
public void setCameraDisplayOrientation(android.hardware.Camera camera) {
Camera.Parameters parameters = camera.getParameters();
android.hardware.Camera.CameraInfo camInfo =
new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(getBackFacingCameraId(), camInfo);
Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
int rotation = display.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (camInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (camInfo.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (camInfo.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
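Note that this snippet assumes a getBackFacingCameraId() helper (and a context field) that are not shown above; a minimal version of the helper could be:
// Hypothetical helper assumed by the snippet above: return the id of
// the first back-facing camera, falling back to camera 0.
private int getBackFacingCameraId() {
Camera.CameraInfo info = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
return i;
}
}
return 0;
}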
Call the setCameraDisplayOrientation() method in the surfaceCreated callback as follows:
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
camera = Camera.open();
setCameraDisplayOrientation(camera);
}
Method 2 is to try:
Get the phone's orientation by using a sensor; this makes life much easier because the orientation will be handled properly.
You can find more about this within this link: Is Android's CameraInfo.orientation correctly documented? Incorrectly implemented?
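A minimal sketch of the sensor approach, run from an Activity, using the framework's OrientationEventListener to keep the renderer's public deviceOrientation field up to date (renderer is a hypothetical reference to your MainRenderer, and the angle buckets are an assumption; adjust them to match how processCameraImage() interprets the DEVICE_ORIENTATION_* constants):
// Sketch: report device rotation into MainRenderer.deviceOrientation.
// Angles are degrees clockwise from natural portrait; the bucket
// boundaries below are assumptions, not taken from the question.
OrientationEventListener orientationListener = new OrientationEventListener(this) {
@Override
public void onOrientationChanged(int orientation) {
if (orientation == ORIENTATION_UNKNOWN) return;
if (orientation >= 315 || orientation < 45) {
renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_PORTRAIT;
} else if (orientation < 135) {
renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_INVERSE_LANDSCAPE;
} else if (orientation < 225) {
renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_INVERSE_PORTRAIT;
} else {
renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_LANDSCAPE;
}
}
};
if (orientationListener.canDetectOrientation()) {
orientationListener.enable();
}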
Method 3:
Edit your code like this:
public class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final String TAG = "MainRenderer";
public static final int DEVICE_ORIENTATION_PORTRAIT = 0;
public static final int DEVICE_ORIENTATION_INVERSE_PORTRAIT = 1;
public static final int DEVICE_ORIENTATION_LANDSCAPE = 2;
public static final int DEVICE_ORIENTATION_INVERSE_LANDSCAPE = 3;
private Camera.CameraInfo cameraInfo;
public volatile int deviceOrientation = DEVICE_ORIENTATION_PORTRAIT;
private FSDK.HTracker tracker;
private int[] textures;
private Camera camera;
private SurfaceTexture surfaceTexture;
private boolean updateSurfaceTexture = false;
private FSDK.FSDK_Features[] trackingFeatures;
private MR.MaskFeatures maskCoords;
private int[] isMaskTexture1Created = new int[]{0};
private int[] isMaskTexture2Created = new int[]{0};
private int width;
private int height;
private ByteBuffer pixelBuffer;
private FSDK.HImage cameraImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE cameraImageMode = new FSDK.FSDK_IMAGEMODE();
private FSDK.HImage snapshotImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE snapshotImageMode = new FSDK.FSDK_IMAGEMODE();
private MainView mainView;
private MainActivity mainActivity;
private volatile boolean isResizeCalled = false;
private volatile boolean isResized = false;
public long IDs[] = new long[MR.MAX_FACES];
public long face_count[] = new long[1];
private long frameCount = 0;
private long startTime = 0;
private AtomicBoolean isTakingSnapshot = new AtomicBoolean(false);
public static final int[][] MASKS = new int[][]{
{R.raw.lips_pink, R.drawable.lips_pink, R.drawable.lips_pink_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_purple, R.drawable.lips_purple, R.drawable.lips_purple_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_red, R.drawable.lips_red, R.drawable.lips_red_normal, MR.SHIFT_TYPE_NO},
};
private int mask = 0;
private int maskLoaded = 0;
private volatile boolean isMaskChanged = false;
private boolean inPreview = false;
public void changeMask(int i) {
mask += i;
isMaskChanged = true;
}
public MainRenderer(MainView view) {
tracker = Application.tracker;
mainView = view;
mainActivity = (MainActivity) mainView.getContext();
trackingFeatures = new FSDK.FSDK_Features[MR.MAX_FACES];
for (int i = 0; i < MR.MAX_FACES; ++i) {
trackingFeatures[i] = new FSDK.FSDK_Features();
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j] = new FSDK.TPoint();
}
}
maskCoords = new MR.MaskFeatures();
}
public void close() {
updateSurfaceTexture = false;
surfaceTexture.release();
camera.stopPreview();
camera.release();
camera = null;
deleteTex();
}
public void startCamera() {
if (inPreview) {
camera.stopPreview();
inPreview = false;
}
//NB: if you don't release the current camera before switching, your app will crash
camera.release();
//swap the id of the camera to be used
//Below I have tried to change the camera facing
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_BACK;
} else {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_FRONT;
}
camera = Camera.open(cameraInfo.facing);
//Code snippet for this method from somewhere on Android developers, I forget where
//setCameraDisplayOrientation(mainActivity, cameraInfo.facing, camera);
try {
//this step is critical or the preview on the new camera will not know where to render to
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
camera.startPreview();
inPreview = true;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceCreated");
isResizeCalled = false;
isResized = false;
initTex();
loadMask(mask);
surfaceTexture = new SurfaceTexture(textures[0]);
surfaceTexture.setOnFrameAvailableListener(this);
// Find the ID of the camera
int cameraId = 0;
boolean frontCameraFound = false;
cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
}
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
frontCameraFound = true;
}
}
if (frontCameraFound) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
GLES11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); //background color
}
private byte[] readBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
int bufferSize = 16384;
byte[] buffer = new byte[bufferSize];
int len;
while ((len = inputStream.read(buffer)) != -1) {
byteBuffer.write(buffer, 0, len);
}
return byteBuffer.toByteArray();
}
// must be called from the thread with OpenGL context!
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public void loadMask(int maskNumber) {
GLES11.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
Log.d(TAG, "Loading mask...");
int[] mask = MASKS[maskNumber];
if (isMaskTexture1Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 1);
}
if (isMaskTexture2Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 2);
}
isMaskTexture1Created[0] = 0;
isMaskTexture2Created[0] = 0;
InputStream stream = mainView.getResources().openRawResource(mask[0]);
int res = MR.LoadMaskCoordsFromStream(stream, maskCoords);
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (res != FSDK.FSDKE_OK) {
Log.e(TAG, "Error loading mask coords from stream: " + res);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
return;
}
BitmapFactory.Options bitmapDecodingOptions = new BitmapFactory.Options();
bitmapDecodingOptions.inScaled = false; // to load original image without scaling
FSDK.HImage img1 = new FSDK.HImage();
if (mask[1] == -1) { // if no image
FSDK.CreateEmptyImage(img1);
} else {
stream = mainView.getResources().openRawResource(mask[1]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img1, data, data.length);
Log.d(TAG, "Load mask image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img1, w);
FSDK.GetImageHeight(img1, h);
Log.d(TAG, "Mask image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask image, using empty image");
FSDK.CreateEmptyImage(img1);
}
}
FSDK.HImage img2 = new FSDK.HImage();
if (mask[2] == -1) { // if no normal image
FSDK.CreateEmptyImage(img2);
} else {
stream = mainView.getResources().openRawResource(mask[2]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img2, data, data.length);
Log.d(TAG, "Load mask normal image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img2, w);
FSDK.GetImageHeight(img2, h);
Log.d(TAG, "Mask normal image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask normal image, using empty image");
FSDK.CreateEmptyImage(img2);
}
}
res = MR.LoadMask(img1, img2, textures[1], textures[2], isMaskTexture1Created, isMaskTexture2Created);
FSDK.FreeImage(img1);
FSDK.FreeImage(img2);
Log.d(TAG, "Mask loaded with result " + res + " texture1Created:" + isMaskTexture1Created[0] + " texture2Created:" + isMaskTexture2Created[0]);
Log.d(TAG, "Mask textures: " + textures[1] + " " + textures[2]);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
}
public void onDrawFrame(GL10 unused) { //call opengl functions only inside these functions!
GLES11.glClear(GLES11.GL_COLOR_BUFFER_BIT);
if (!isResized) {
return;
}
synchronized (this) {
if (updateSurfaceTexture) {
surfaceTexture.updateTexImage();
updateSurfaceTexture = false;
}
}
if (isMaskChanged) {
maskLoaded = mask;
loadMask(mask);
isMaskChanged = false;
}
int rotation = 1;
// First, drawing without mask to get image buffer
int res = MR.DrawGLScene(textures[0], 0, trackingFeatures, rotation, MR.SHIFT_TYPE_NO, textures[1], textures[2], maskCoords, 0, 0, width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the first MR.DrawGLScene call: " + res);
}
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
face_count[0] = 0;
processCameraImage();
// Second, drawing with mask atop of image
res = MR.DrawGLScene(textures[0], (int) face_count[0], trackingFeatures, rotation, MASKS[maskLoaded][3], textures[1], textures[2], maskCoords, isMaskTexture1Created[0], isMaskTexture2Created[0], width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the second MR.DrawGLScene call: " + res);
}
// Save snapshot if needed
if (isTakingSnapshot.compareAndSet(true, false)) {
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
snapshotImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
res = FSDK.LoadImageFromBuffer(snapshotImage, pixelBuffer.array(), width, height, width * 4, snapshotImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading snapshot image to FaceSDK: " + res);
} else {
FSDK.MirrorImage(snapshotImage, false);
String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath();
final String filename = galleryPath + "/MirrorRealityDemo" + System.currentTimeMillis() + ".png";
res = FSDK.SaveImageToFile(snapshotImage, filename);
Log.d(TAG, "saving snapshot to " + filename);
FSDK.FreeImage(snapshotImage);
if (FSDK.FSDKE_OK == res) {
mainActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(filename);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
mainActivity.sendBroadcast(mediaScanIntent);
Toast.makeText(mainActivity, "Saved successfully", Toast.LENGTH_SHORT).show();
}
});
}
}
}
// Show fps
++frameCount;
long timeCurrent = System.currentTimeMillis();
if (startTime == 0) startTime = timeCurrent;
long diff = timeCurrent - startTime;
if (diff >= 3000) {
final float fps = frameCount / (diff / 1000.0f);
frameCount = 0;
startTime = 0;
final TextView fpsTextView = mainActivity.fpsTextView();
mainActivity.fpsTextView().post(new Runnable() {
@Override
public void run() {
if (!mainActivity.isFinishing()) {
fpsTextView.setText(fps + " FPS");
}
}
});
}
}
private void processCameraImage() {
//clear previous features
for (int i = 0; i < MR.MAX_FACES; ++i) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = 0;
trackingFeatures[i].features[j].y = 0;
}
}
cameraImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
int res = FSDK.LoadImageFromBuffer(cameraImage, pixelBuffer.array(), width, height, width * 4, cameraImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading camera image to FaceSDK: " + res);
return;
}
FSDK.MirrorImage(cameraImage, false);
int[] widthByReference = new int[1];
int[] heightByReference = new int[1];
FSDK.GetImageWidth(cameraImage, widthByReference);
FSDK.GetImageHeight(cameraImage, heightByReference);
int width = widthByReference[0];
int height = heightByReference[0];
int rotation = 0;
if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_PORTRAIT) {
rotation = 2;
} else if (deviceOrientation == DEVICE_ORIENTATION_LANDSCAPE) {
rotation = 3;
} else if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_LANDSCAPE) {
rotation = 1;
}
if (rotation > 0) {
FSDK.HImage rotated = new FSDK.HImage();
FSDK.CreateEmptyImage(rotated);
FSDK.RotateImage90(cameraImage, rotation, rotated);
FSDK.FeedFrame(tracker, 0, rotated, face_count, IDs);
FSDK.FreeImage(rotated);
} else {
FSDK.FeedFrame(tracker, 0, cameraImage, face_count, IDs);
}
for (int i = 0; i < (int) face_count[0]; ++i) {
FSDK.GetTrackerFacialFeatures(tracker, 0, IDs[i], trackingFeatures[i]);
if (rotation > 0) {
if (rotation == 1) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = height - 1 - x;
}
} else if (rotation == 2) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].y = height - 1 - trackingFeatures[i].features[j].y;
}
} else {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = x;
}
}
}
}
FSDK.FreeImage(cameraImage);
}
public void onSurfaceChanged(GL10 unused, int width, int height) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceChanged");
if (!isResizeCalled) {
isResizeCalled = true;
mainView.resizeForPerformance(width, height);
return;
}
GLES11.glViewport(0, 0, width, height);
Camera.Parameters param = camera.getParameters();
List<Camera.Size> psize = param.getSupportedPreviewSizes();
if (psize.size() > 0) {
int i = 0;
int optDistance = Integer.MAX_VALUE;
Log.d(TAG, "Choosing preview resolution closer to " + width + " x " + height);
double neededScale = height / (double) width;
for (int j = 0; j < psize.size(); ++j) {
double scale = psize.get(j).width / (double) psize.get(j).height;
int distance = (int) (10000 * Math.abs(scale - neededScale));
Log.d(TAG, "Choosing preview resolution, probing " + psize.get(j).width + " x " + psize.get(j).height + " distance: " + distance);
if (distance < optDistance) {
i = j;
optDistance = distance;
} else if (distance == optDistance) {
// try to avoid too low resolution
if ((psize.get(i).width < 300 || psize.get(i).height < 300)
&& psize.get(j).width > psize.get(i).width && psize.get(j).height > psize.get(i).height) {
i = j;
}
}
}
Log.d(TAG, "Using optimal preview size: " + psize.get(i).width + " x " + psize.get(i).height);
param.setPreviewSize(psize.get(i).width, psize.get(i).height);
// adjusting viewport to camera aspect ratio
int viewportHeight = (int) (width * (psize.get(i).width * 1.0f / psize.get(i).height));
GLES11.glViewport(0, 0, width, viewportHeight);
this.width = width;
this.height = viewportHeight;
pixelBuffer = ByteBuffer.allocateDirect(this.width * this.height * 4).order(ByteOrder.nativeOrder());
}
param.set("orientation", "landscape");
camera.setParameters(param);
camera.startPreview();
inPreview = true;
isResized = true;
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
private void initTex() {
textures = new int[3];
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
GLES11.glEnable(GL10.GL_TEXTURE_2D);
GLES11.glGenTextures(3, textures, 0);
GLES11.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_S, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_T, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MIN_FILTER, GLES11.GL_NEAREST);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MAG_FILTER, GLES11.GL_NEAREST);
}
private void deleteTex() {
GLES11.glDeleteTextures(3, textures, 0);
}
public synchronized void onFrameAvailable(SurfaceTexture st) {
updateSurfaceTexture = true;
mainView.requestRender();
}
public synchronized void snapshot() {
isTakingSnapshot.set(true);
}
}
This is sometimes due to Android not knowing what orientation the previous activity was in; try setting the orientation of the activity with
android:screenOrientation="nosensor"
or try the following:
Add the orientation attribute in the manifest
android:screenOrientation=["unspecified" | "behind" |
"landscape" | "portrait" |
"reverseLandscape" | "reversePortrait" |
"sensorLandscape" | "sensorPortrait" |
"userLandscape" | "userPortrait" |
"sensor" | "fullSensor" | "nosensor" |
"user" | "fullUser" | "locked"]
So in your case it will be
<activity android:name=".yourCameraActivity"
....
android:screenOrientation="sensorPortrait"/>

How to resolve huge matrix multiplication performance issue in Android

I have huge .csv files (the biggest one is 72 MB) that I need to load into my Android app. They contain matrices, e.g. [7500x1000], which I later multiply with another one. Because they are too big to read in one go, I read them line after line and perform the multiplication as I go. I tested this on a Nexus 4 and it took about 15 minutes to do everything. When I copied the code from the Android project into a plain Java one (the only difference being that on Android I use Bitmap and in Java BufferedImage) and ran it as a Java application, it took a few seconds. Do you have any idea why this is and how to shorten the time? Here is my code:
Android:
public class NetworkThread extends Thread {
private static boolean threadIsWorking = false;
private Context context;
private SQLiteHandler sql;
private static NetworkThread REFERENCE = null;
private String w1 = "w1.csv";
private String w2 = "w2.csv";
private String w3 = "w3.csv";
private String w4 = "w4.csv";
String NEURON_PATH = "/data/data/com.ZPIProject/neuronFiles/";
public static NetworkThread getInstance(Context context, SQLiteHandler sql) {
if (REFERENCE == null)
REFERENCE = new NetworkThread(context, sql);
return REFERENCE;
}
private NetworkThread(Context context, SQLiteHandler sql) {
this.context = context;
this.sql = sql;
}
@Override
public void start() {
if (!threadIsWorking) {
threadIsWorking = true;
try {
Log.i("MATRIX", "THREAD ID: " + Thread.currentThread().getId());
Bitmap bit = BitmapFactory.decodeResource(context.getResources(), R.drawable.aparat_small);
double[] imageInfo = ImageUtils.getImageInfo(bit);
copyNeuronWeightsToMemoryOnPhone();
double[] m4 = multiplyImageWithNeuronWeights(imageInfo);
List<WeightWithId> best10 = findBest10Results(m4);
for (int i = 0; i < best10.size(); i++) {
Log.e("IDS", best10.get(i).getId() + "");
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
private List<WeightWithId> findBest10Results(double[] m4) throws SQLException, CloneNotSupportedException {
List<WeightWithId> best10 = new ArrayList<WeightWithId>(20);
for (int j = 0; j < 19; j++) {
Map<Integer, double[]> readDescriptions = sql.getDescriptionForShoeId(j * 100, j * 100 + 100);
List<WeightWithId> distances = new ArrayList<WeightWithId>(100);
for (Integer i : readDescriptions.keySet()) {
double dist = dist(m4, readDescriptions.get(i));
distances.add(new WeightWithId(i, dist));
}
Collections.sort(distances);
for (int i = 0; i < 10; i++) {
best10.add(distances.get(i).clone());
}
Collections.sort(best10);
if (best10.size() > 10) {
for (int i = 10; i < best10.size(); i++) {
best10.remove(i);
}
}
}
return best10;
}
private double[] multiplyImageWithNeuronWeights(double[] imageInfo) throws IOException {
Log.i("MATRIX MULTIPLY", "M1");
double[] m1 = MatrixMaker.multiplyMatrixWithCsvMatrix(NEURON_PATH + w1, imageInfo, 1000, false);
Log.i("MATRIX MULTIPLY", "M2");
double[] m2 = MatrixMaker.multiplyMatrixWithCsvMatrix(NEURON_PATH + w2, m1, 500, false);
Log.i("MATRIX MULTIPLY", "M3");
double[] m3 = MatrixMaker.multiplyMatrixWithCsvMatrix(NEURON_PATH + w3, m2, 250, false);
Log.i("MATRIX MULTIPLY", "M4");
return MatrixMaker.multiplyMatrixWithCsvMatrix(NEURON_PATH + w4, m3, 50, true);
}
private void copyNeuronWeightsToMemoryOnPhone() throws IOException {
Log.i("MATRIX COPY", "W1");
CopyUtils.copyFileIntoDevice(NEURON_PATH, w1, context);
Log.i("MATRIX COPY", "W2");
CopyUtils.copyFileIntoDevice(NEURON_PATH, w2, context);
Log.i("MATRIX COPY", "W3");
CopyUtils.copyFileIntoDevice(NEURON_PATH, w3, context);
Log.i("MATRIX COPY", "W4");
CopyUtils.copyFileIntoDevice(NEURON_PATH, w4, context);
}
private double dist(double[] a, double[] b) {
double result = 0;
for (int i = 0; i < a.length; i++)
result += (a[i] - b[i]) * (a[i] - b[i]);
return result;
}
}
Matrix Operations:
public class MatrixMaker {
public static double[] multiplyMatrixWithCsvMatrix(String csvFilePath, double[] imageInfo, int expectedSize, boolean lastOne) throws IOException {
InputStream inputStream = new FileInputStream(new File(csvFilePath));
InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
String line;
//counter - the position the calculated value should be written to
int counter = 0;
//result array
double[] multipliedImageInfoWithCsvMatrix = new double[expectedSize + 1];
//declaration of array for read line (parsed into double array)
double[] singleLineFromCsv = new double[imageInfo.length];
while ((line = bufferedReader.readLine()) != null) {
//splitting and parsing values from read line
String[] splitted = line.split(",");
for (int i = 0; i < splitted.length; i++) {
singleLineFromCsv[i] = Double.valueOf(splitted[i]);
}
//multiply imageInfo array with single row from csv file
multipliedImageInfoWithCsvMatrix[counter] = multiplyOneRow(imageInfo, singleLineFromCsv);
//ugly flag 'lastOne' needed for another business case
if (!lastOne) {
multipliedImageInfoWithCsvMatrix[counter] = 1d / (1 + Math.exp(-multipliedImageInfoWithCsvMatrix[counter]));
}
counter++;
//logging progress
if (counter % 100 == 0)
Log.i("MULTIPLY PROGRESS", counter + " " + System.currentTimeMillis());
}
if (!lastOne)
multipliedImageInfoWithCsvMatrix[expectedSize] = 1d;
bufferedReader.close();
inputStream.close();
return multipliedImageInfoWithCsvMatrix;
}
private static double multiplyOneRow(double first[], double second[]) {
double result = 0d;
for (int i = 0; i < first.length; i++) {
result += first[i] * second[i];
}
return result;
}
}
onCreate in one of the activities:
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
try {
final SQLiteHandler sql = new SQLiteHandler(this);
sql.init();
NetworkThread.getInstance(this, sql).start();
} catch (IOException e) {
e.printStackTrace();
} catch (SQLException e) {
e.printStackTrace();
}
}
JAVA:
Multiply, the equivalent of MatrixMaker:
import java.io.*;
public class Multiply {
public static double[] multiplyMatrixWithCsvMatrix(String csvFilePath, double[] imageInfo,
int expectedSize, boolean lastOne) throws IOException {
InputStream inputStream = new FileInputStream(new File(csvFilePath));
InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
String line;
// counter - the position the calculated value should be written to
int counter = 0;
// result array
double[] multipliedImageInfoWithCsvMatrix = new double[expectedSize + 1];
// declaration of array for read line (parsed into double array)
double[] singleLineFromCsv = new double[imageInfo.length];
while ((line = bufferedReader.readLine()) != null) {
// splitting and parsing values from read line
String[] splitted = line.split(",");
for (int i = 0; i < splitted.length; i++) {
singleLineFromCsv[i] = Double.valueOf(splitted[i]);
}
// multiply imageInfo array with single row from csv file
multipliedImageInfoWithCsvMatrix[counter] = multiplyOneRow(imageInfo, singleLineFromCsv);
// ugly flag 'lastOne' needed for another business case
if (!lastOne) {
multipliedImageInfoWithCsvMatrix[counter] = 1d / (1 + Math
.exp(-multipliedImageInfoWithCsvMatrix[counter]));
}
counter++;
// logging progress
if (counter % 100 == 0)
System.out.println(counter + " " + System.currentTimeMillis());
}
if (!lastOne)
multipliedImageInfoWithCsvMatrix[expectedSize] = 1d;
bufferedReader.close();
inputStream.close();
return multipliedImageInfoWithCsvMatrix;
}
private static double multiplyOneRow(double first[], double second[]) {
double result = 0d;
for (int i = 0; i < first.length; i++) {
result += first[i] * second[i];
}
return result;
}
}
Executable class
public class MRunner {
private static final int RED_INDEX = 0;
private static final int GREEN_INDEX = 2500;
private static final int BLUE_INDEX = 5000;
private static final String w1 = "w1.csv";
private static final String NEURON_PATH = "C:\\Users\\Vortim\\Desktop\\";
public static void main(String[] args) {
new Thread(new Runnable() {
@Override
public void run() {
try {
Multiply.multiplyMatrixWithCsvMatrix(NEURON_PATH + w1, getImageInfo(ImageIO
.read(new File("C:\\Users\\Vortim\\git\\MatrixMaker\\but.png"))), 1000,
false);
} catch (IOException e) {
e.printStackTrace();
}
}
}).start();
}
public static double[] getImageInfo(BufferedImage source) {
double[] result = new double[7501];
int width = source.getWidth();
int height = source.getHeight();
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
if (x == 0) {
result[y + RED_INDEX] = 255d;
result[y + GREEN_INDEX] = 255d;
result[y + BLUE_INDEX] = 255d;
} else if (y == 0) {
result[x * 50 + RED_INDEX] = 255d;
result[x * 50 + GREEN_INDEX] = 255d;
result[x * 50 + BLUE_INDEX] = 255d;
} else {
int pixel = source.getRGB(x, y);
double r = (pixel) >> 16 & 0xff;
double g = (pixel) >> 8 & 0xff;
double b = (pixel) & 0xff;
result[x * y + RED_INDEX] = 1d - (r / 255d);
result[x * y + GREEN_INDEX] = 1d - (g / 255d);
result[x * y + BLUE_INDEX] = 1d - (b / 255d);
}
}
}
result[7500] = 1;
return result;
}
}
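A likely culprit is allocation churn rather than the arithmetic itself: line.split(",") creates a fresh String[] plus one String per value for every row, and Double.valueOf boxes each parsed value, so a 72 MB file generates millions of short-lived objects that Dalvik's garbage collector reclaims far more slowly than a desktop JVM. Here is a sketch of one common workaround, assuming the matrices do not change between runs (the CsvToBinary class name and file paths are hypothetical): convert each .csv once into a flat binary file of doubles, then stream it back with DataInputStream, which needs no parsing and almost no per-row allocation:
import java.io.*;
public class CsvToBinary {
// one-time conversion: parse the CSV once and write raw big-endian doubles
public static void convert(String csvPath, String binPath) throws IOException {
BufferedReader reader = new BufferedReader(new FileReader(csvPath));
DataOutputStream out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(binPath)));
String line;
while ((line = reader.readLine()) != null) {
for (String token : line.split(",")) {
out.writeDouble(Double.parseDouble(token));
}
}
out.close();
reader.close();
}
// the multiplication loop then reads raw doubles: no String or boxing per row
public static double[] multiplyFromBinary(String binPath, double[] imageInfo, int expectedSize, boolean lastOne) throws IOException {
DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(binPath), 1 << 16));
double[] result = new double[expectedSize + 1];
for (int row = 0; row < expectedSize; row++) {
double sum = 0d;
for (int col = 0; col < imageInfo.length; col++) {
sum += imageInfo[col] * in.readDouble();
}
result[row] = lastOne ? sum : 1d / (1 + Math.exp(-sum));
}
if (!lastOne) {
result[expectedSize] = 1d;
}
in.close();
return result;
}
}
Reading the [7500x1000] matrix this way is a single sequential pass through a 64 KB buffer; if that alone does not close the gap, profiling on the device would show whether the remaining time goes to GC, I/O, or Dalvik's weaker floating-point JIT.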
