I've followed the instructions in this tutorial: http://code.tutsplus.com/tutorials/create-a-live-wallpaper-on-android-using-an-animated-gif--cms-23088
But I have had a few errors and am unable to run my project.
This is all my code:
My manifest:
<!-- Live wallpaper service entry; BIND_WALLPAPER ensures only the system
     can bind to it. -->
<service
    android:name=".GIFWallpaperService"
    android:enabled="true"
    android:label="Raindrops In Paris"
    android:permission="android.permission.BIND_WALLPAPER" >
    <intent-filter>
        <action android:name="android.service.wallpaper.WallpaperService"/>
    </intent-filter>
    <!-- FIX: resource references use '@', not '#'. -->
    <meta-data
        android:name="android.service.wallpaper"
        android:resource="@xml/wallpaper" >
    </meta-data>
</service>
<uses-feature
    android:name="android.software.live_wallpaper"
    android:required="true" >
</uses-feature>
My Java class:
package com.gacafw.gina.raindropsinparis;
import android.graphics.Canvas;
import android.graphics.Movie;
import android.os.Handler;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import android.view.SurfaceHolder;
import java.io.IOException;
public class GIFWallpaperService extends WallpaperService {
#Override
public WallpaperService.Engine onCreateEngine() {
try {
Movie movie = Movie.decodeStream(
getResources().getAssets().open("rainDropAna.gif"));
return new GIFWallpaperEngine(movie);
}catch(IOException e){
Log.d("GIF", "Could not load asset");
return null;
}
}
private Runnable drawGIF = new Runnable() {
public void run() {
draw();
}
};
private void draw() {
if (visible) {
Canvas canvas = holder.lockCanvas();
canvas.save();
// Adjust size and position so that
// the image looks good on your screen
canvas.scale(3f, 3f);
movie.draw(canvas, -100, 0);
canvas.restore();
holder.unlockCanvasAndPost(canvas);
movie.setTime((int) (System.currentTimeMillis() % movie.duration()));
handler.removeCallbacks(drawGIF);
handler.postDelayed(drawGIF, frameDuration);
}
}
#Override
public void onVisibilityChanged(boolean visible) {
this.visible = visible;
if (visible) {
handler.post(drawGIF);
} else {
handler.removeCallbacks(drawGIF);
}
}
private class GIFWallpaperEngine extends WallpaperService.Engine {
private final int frameDuration = 20;
private SurfaceHolder holder;
private Movie movie;
private boolean visible;
private Handler handler;
public GIFWallpaperEngine(Movie movie) {
this.movie = movie;
handler = new Handler();
}
#Override
public void onCreate(SurfaceHolder surfaceHolder) {
super.onCreate(surfaceHolder);
this.holder = surfaceHolder;
}
#Override
public void onDestroy() {
super.onDestroy();
handler.removeCallbacks(drawGIF);
}
}
}
My wallpaper.xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Live wallpaper descriptor referenced from the manifest meta-data.
     FIX: resource reference uses '@drawable', not '#drawable'. -->
<wallpaper
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:label="Raindrops In Paris"
    android:thumbnail="@drawable/ic_launcher">
</wallpaper>
My errors currently:
The variables visible, holder, movie, handler in the draw() and onVisibilityChanged() are giving the error Cannot Resolve Symbol. I assume this is because they are out of scope in these methods?
I think I interpreted the instructions wrong but I can't figure out where I went wrong.
The tut contains an error - where it says "Add the following code to the GIFWallpaperService class:" it should say add it to the GIFWallpaperEngine class.
I had the same problem. I created an Activity and passed an intent to run the wallpaper. Here is your answer:
public class SetWallpaperActivity extends Activity {
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
Intent intent = new Intent(
WallpaperManager.ACTION_CHANGE_LIVE_WALLPAPER);
intent.putExtra(WallpaperManager.EXTRA_LIVE_WALLPAPER_COMPONENT,
new ComponentName(this, GIFWallpaperService.class));
startActivity(intent);
}
}
Related
This is ClassifierActivity.java file which is rendering by default:
package org.tensorflow.lite.examples.classification;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Typeface;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.SystemClock;
import android.util.Size;
import android.util.TypedValue;
import android.widget.Toast;
import java.io.IOException;
import java.util.List;
import org.tensorflow.lite.examples.classification.env.BorderedText;
import org.tensorflow.lite.examples.classification.env.Logger;
import org.tensorflow.lite.examples.classification.tflite.Classifier;
import org.tensorflow.lite.examples.classification.tflite.Classifier.Device;
import org.tensorflow.lite.examples.classification.tflite.Classifier.Model;
public class ClassifierActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger();
private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
private static final float TEXT_SIZE_DIP = 10;
private Bitmap rgbFrameBitmap = null;
private long lastProcessingTimeMs;
private Integer sensorOrientation;
public Classifier classifier;
private BorderedText borderedText;
/** Input image size of the model along x axis. */
private int imageSizeX;
/** Input image size of the model along y axis. */
private int imageSizeY;
#Override
protected int getLayoutId() {
return R.layout.camera_connection_fragment;
}
#Override
protected Size getDesiredPreviewFrameSize() {
return DESIRED_PREVIEW_SIZE;
}
#Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
recreateClassifier(getModel(), getDevice(), getNumThreads());
if (classifier == null) {
LOGGER.e("No classifier on preview!");
return;
}
previewWidth = size.getWidth();
previewHeight = size.getHeight();
sensorOrientation = rotation - getScreenOrientation();
LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
}
#Override
protected void processImage() {
rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
final int cropSize = Math.min(previewWidth, previewHeight);
runInBackground(
new Runnable() {
#Override
public void run() {
if (classifier != null) {
final long startTime = SystemClock.uptimeMillis();
final List<Classifier.Recognition> results =
classifier.recognizeImage(rgbFrameBitmap, sensorOrientation);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
LOGGER.v("Detect: %s", results);
runOnUiThread(
new Runnable() {
#Override
public void run() {
showResultsInBottomSheet(results);
showFrameInfo(previewWidth + "x" + previewHeight);
showCropInfo(imageSizeX + "x" + imageSizeY);
showCameraResolution(cropSize + "x" + cropSize);
showRotationInfo(String.valueOf(sensorOrientation));
showInference(lastProcessingTimeMs + "ms");
}
});
}
readyForNextImage();
}
});
}
#Override
protected void onInferenceConfigurationChanged() {
if (rgbFrameBitmap == null) {
// Defer creation until we're getting camera frames.
return;
}
final Device device = getDevice();
final Model model = getModel();
final int numThreads = getNumThreads();
runInBackground(() -> recreateClassifier(model, device, numThreads));
}
private void recreateClassifier(Model model, Device device, int numThreads) {
if (classifier != null) {
LOGGER.d("Closing classifier.");
classifier.close();
classifier = null;
}
if (device == Device.GPU && model == Model.QUANTIZED) {
LOGGER.d("Not creating classifier: GPU doesn't support quantized models.");
runOnUiThread(
() -> {
Toast.makeText(this, "GPU does not yet supported quantized models.", Toast.LENGTH_LONG)
.show();
});
return;
}
try {
LOGGER.d(
"Creating classifier (model=%s, device=%s, numThreads=%d)", model, device, numThreads);
classifier = Classifier.create(this, model, device, numThreads);
} catch (IOException e) {
LOGGER.e(e, "Failed to create classifier.");
}
// Updates the input image size.
imageSizeX = classifier.getImageSizeX();
imageSizeY = classifier.getImageSizeY();
}
}
I created a new activity named Main.java and I want this activity to render first and pass it ClassifierActivity.java as intent by click on button:
package org.tensorflow.lite.examples.classification;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.view.View;
import android.widget.Button;
import android.widget.VideoView;
public class Main extends AppCompatActivity {
VideoView videoView;
private Button btn;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// hide title bar
getSupportActionBar().hide();
// set button on click to scan where open the camera
btn=(Button)findViewById(R.id.button);
btn.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
openActivity();
}
});
videoView = findViewById(R.id.videoview);
Uri uri = Uri.parse("android.resource://"+getPackageName()+"/"+R.raw.turkey);
videoView.setVideoURI(uri);
videoView.start();
videoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
#Override
public void onPrepared(MediaPlayer mp) {
mp.setLooping(true);
}
});
}
protected void openActivity(){
Intent i = new Intent(this, ClassifierActivity.class);
startActivity(i);
}
#Override
protected void onPostResume() {
videoView.resume();
super.onPostResume();
}
#Override
protected void onRestart() {
videoView.start();
super.onRestart();
}
#Override
protected void onPause() {
videoView.suspend();
super.onPause();
}
#Override
protected void onDestroy() {
videoView.stopPlayback();
super.onDestroy();
}
}
This is old AndroidManifest.xml (app running successfully)
<!-- Original launcher declaration. FIX: string references use '@string',
     not '#string'. -->
<activity
    android:name=".ClassifierActivity"
    android:label="@string/activity_name_classification"
    android:screenOrientation="portrait"
    android:exported="true" >
    <intent-filter>
        <action android:name="android.intent.action.MAIN" />
        <category android:name="android.intent.category.LAUNCHER" />
    </intent-filter>
</activity>
And I want to run Main first. So, I changed android:name=".ClassifierActivity" from old AndroidManifest.xml to android:name=".Main" (app is stop running):
<!-- Replacement launcher declaration. FIX: string references use
     '@string', not '#string'. -->
<activity
    android:name=".Main"
    android:label="@string/activity_name_classification"
    android:screenOrientation="portrait"
    android:exported="true" >
    <intent-filter>
        <action android:name="android.intent.action.MAIN" />
        <category android:name="android.intent.category.LAUNCHER" />
    </intent-filter>
</activity>
All activities need to be defined in the manifest.
You are replacing the original Activity with the new one and now the old one is not defined. So you just need to add it back.
<!-- FIX: the <application> element was self-closed ("/>") while also
     having a closing </application> tag, which is malformed XML; it must
     be opened with ">" so the activities nest inside it. Also restored
     '@string' references (were garbled to '#string'). -->
<application
    ...
    >
    <activity
        android:name=".ClassifierActivity"
        android:label="@string/activity_name_classification"
        android:screenOrientation="portrait"
        android:exported="true" >
    </activity>
    <activity
        android:name=".Main"
        android:label="@string/activity_name_classification"
        android:screenOrientation="portrait"
        android:exported="true" >
        <intent-filter>
            <action android:name="android.intent.action.MAIN" />
            <category android:name="android.intent.category.LAUNCHER" />
        </intent-filter>
    </activity>
</application>
I recently started working with Android and I wanted to program my own pedometer app that counts how many steps I have taken. Sadly, somehow the onSensorChanged() method isn't called. I made sure my phone has the sensor, and I also made sure that everything else is working (registering a listener, creating a SensorManager and a Sensor) — everything seems fine. I have also added
uses-permission android:name="android.permission.ACTIVITY_RECOGNITION" in the Android manifest, since another post stated this is needed. Here is my code:
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.util.List;
public class MainActivity extends AppCompatActivity implements SensorEventListener{
Button btnReset;
private SensorManager sensorManager;
private Sensor stepSensor;
private boolean isStepSensorAvailable;
private TextView txtSteps, txtCheck;
private int stepCounter;
private List<Sensor> deviceSensors; //bugfixing
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initialize();
if(sensorManager.getDefaultSensor(Sensor.TYPE_STEP_DETECTOR) != null)
{
stepSensor = sensorManager.getDefaultSensor(Sensor.TYPE_STEP_DETECTOR);
txtCheck.setText("Sensor was found, stepsensor is now " + stepSensor.getName());
isStepSensorAvailable = true;
}
else
{
txtSteps.setText(getString(R.string.txt_step_sensor_not_available));
isStepSensorAvailable = false;
}
}
private void initialize() {
sensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
txtSteps = findViewById(R.id.txtSteps);
txtCheck = findViewById(R.id.txtCheck);
stepCounter = 0;
btnReset = findViewById(R.id.btnReset);
btnReset.setVisibility(View.INVISIBLE);
txtSteps.setVisibility(View.INVISIBLE);
}
/**
* Checking for available Sensors
*/
#Override
protected void onResume()
{
super.onResume();
if(sensorManager.getDefaultSensor(Sensor.TYPE_STEP_DETECTOR) != null)
{
sensorManager.registerListener(this, stepSensor, sensorManager.SENSOR_DELAY_FASTEST);
txtCheck.setText(txtCheck.getText() + System.getProperty("line.separator") + "listener registered!");
}
}
#Override
protected void onPause()
{
super.onPause();
if(sensorManager.getDefaultSensor(Sensor.TYPE_STEP_DETECTOR) != null)
{
sensorManager.unregisterListener(this, stepSensor);
}
}
#Override
public void onSensorChanged(SensorEvent event) {
if (event.sensor == stepSensor)
{
txtCheck.setText(txtCheck.getText() + System.getProperty("line.separator") + "Sensorevent was triggered!");
stepCounter = (int) (stepCounter + event.values[0]);
txtSteps.setText(String.valueOf(stepCounter));
}
}
#Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
}
Thanks in advance for all the help.
Replace
if (event.sensor == stepSensor)
with
if (event.sensor.getType() == Sensor.TYPE_STEP_DETECTOR)
You need to add the following to your AndroidManifest.xml:
<uses-feature
android:name="android.hardware.sensor.stepcounter"
android:required="true"/>
<uses-feature
android:name="android.hardware.sensor.stepdetector"
android:required="true"/>
If you use Android 10 or later, you also need to add the following permission:
<uses-permission android:name="android.permission.ACTIVITY_RECOGNITION"/>
Yesterday I asked a simplified version of my problem, but I think it was too simplified.
What my program should do is listen for a keyword, and when it hears it, listen to what I say next (like talking to Siri or Google Now by saying "Siri" or "OK Google").
I'm using PocketSphinx for the keyword and the Google speech recognizer for the longer parts. It works, but only once. The PocketSphinx code is in the MainActivity and the Google recognizer is in an extra class (Jarvis).
The program starts with the PocketSphinx listener; when it hears the KEYPHRASE, it starts the Google listener by calling jarvis.startListener() (via the next() method). That is where the problem is: when the Google listener is done, I can't get back from the Jarvis class to the MainActivity to call the next() method again.
(When the Google recognizer is done, the last thing it does is in onResult() in the Jarvis class, but from there I can't call the next() method of the MainActivity class.)
MainActivity
package com.example.superuser.jarvis;
import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import java.io.File;
import java.io.IOException;
import edu.cmu.pocketsphinx.Assets;
import edu.cmu.pocketsphinx.Hypothesis;
import edu.cmu.pocketsphinx.SpeechRecognizer;
import edu.cmu.pocketsphinx.SpeechRecognizerSetup;
import static android.widget.Toast.makeText;
import static edu.cmu.pocketsphinx.SpeechRecognizerSetup.defaultSetup;
public class MainActivity extends Activity implements edu.cmu.pocketsphinx.RecognitionListener {
private String LOG_TAG = "Jarvis_hears_anything";
private TextView tv;
private Jarvis jarvis;
private boolean wannahearjarvis = false;
/* Named searches allow to quickly reconfigure the decoder */
private static final String KWS_SEARCH = "wakeup";
/* Keyword we are looking for to activate menu */
private static final String KEYPHRASE = "jarvis";
private edu.cmu.pocketsphinx.SpeechRecognizer recognizer;
//private HashMap<String, Integer> captions;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
final Button button = (Button) findViewById(R.id.b1);
tv = (TextView) findViewById(R.id.tv1);
//captions = new HashMap<String, Integer>();
//captions.put(KWS_SEARCH, R.string.kws_caption);
jarvis = new Jarvis(getApplicationContext());
new AsyncTask<Void, Void, Exception>() {
#Override
protected Exception doInBackground(Void... params) {
try {
Assets assets = new Assets(MainActivity.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
#Override
protected void onPostExecute(Exception result) {
if (result != null) {
((TextView) findViewById(R.id.tv1))
.setText("Failed to init recognizer " + result);
} else {
//switchSearch(KWS_SEARCH);
recognizer.startListening(KWS_SEARCH);
}
}
}.execute();
button.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(), "geht", Toast.LENGTH_SHORT).show();
}
});
}
public void next(){
if (wannahearjarvis){
recognizer.startListening(KWS_SEARCH);
wannahearjarvis = false;
}
else{
jarvis.startListening();
wannahearjarvis = true;
}
}
#Override
public void onDestroy() {
super.onDestroy();
recognizer.cancel();
recognizer.shutdown();
}
/**
* In partial result we get quick updates about current hypothesis. In
* keyword spotting mode we can react here, in other modes we need to wait
* for final result in onResult.
*/
#Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null)
return;
String text = hypothesis.getHypstr();
if (text.equals(KEYPHRASE)){
tv.append("found");
recognizer.stop();
//switchSearch(KWS_SEARCH);
}
else {
//((TextView) findViewById(R.id.tv1)).append(text+"PR");
//Log.i(LOG_TAG, text+"PR");
}
}
/**
* This callback is called when we stop the recognizer.
*/
#Override
public void onResult(Hypothesis hypothesis) {
//((TextView) findViewById(R.id.tv1)).setText("");
((TextView) findViewById(R.id.tv1)).append("oR");
if (hypothesis != null) {
String text = hypothesis.getHypstr();
makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
}
next();
}
#Override
public void onBeginningOfSpeech() {
}
/**
* We stop recognizer here to get a final result
*/
#Override
public void onEndOfSpeech() {
if (!recognizer.getSearchName().equals(KWS_SEARCH)){
tv.append("fuck");
}
//switchSearch(KWS_SEARCH);
}
/*private void switchSearch(String searchName) {
recognizer.stop();
// If we are not spotting, start listening with timeout (10000 ms or 10 seconds).
if (searchName.equals(KWS_SEARCH))
recognizer.startListening(searchName);
else
recognizer.startListening(searchName, 10000);
//String caption = getResources().getString(captions.get(searchName));
//((TextView) findViewById(R.id.tv1)).setText(caption);
//((TextView) findViewById(R.id.tv1)).append(caption);
}*/
private void setupRecognizer(File assetsDir) throws IOException {
// The recognizer can be configured to perform multiple searches
// of different kind and switch between them
recognizer = defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
// To disable logging of raw audio comment out this call (takes a lot of space on the device)
.setRawLogDir(assetsDir)
// Threshold to tune for keyphrase to balance between false alarms and misses
.setKeywordThreshold(1e-20f)
// Use context-independent phonetic search, context-dependent is too slow for mobile
.setBoolean("-allphone_ci", true)
.getRecognizer();
recognizer.addListener(this);
/** In your application you might not need to add all those searches.
* They are added here for demonstration. You can leave just one.
*/
// Create keyword-activation search.
recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
}
#Override
public void onError(Exception error) {
((TextView) findViewById(R.id.tv1)).setText(error.getMessage());
}
#Override
public void onTimeout() {
//switchSearch(KWS_SEARCH);
}
}
Jarvis
package com.example.superuser.jarvis;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.widget.Toast;
import java.util.ArrayList;
public class Jarvis implements RecognitionListener{
private AudioManager audiom;
private SpeechRecognizer speech;
private Intent recogIntent;
private Toast m;
private Context c;
private String text;
public Jarvis(Context context){
speech = SpeechRecognizer.createSpeechRecognizer(context);
speech.setRecognitionListener(this);
recogIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
recogIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "de");
//recogIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName());
m = new Toast(context);
c=context;
}
public void startListening(){
speech.startListening(recogIntent);
}
public void destroy(){
speech.stopListening();
speech.cancel();
speech.destroy();
}
#Override
public void onReadyForSpeech(Bundle params) {
}
#Override
public void onBeginningOfSpeech() {
}
#Override
public void onRmsChanged(float rmsdB) {
}
#Override
public void onBufferReceived(byte[] buffer) {
}
#Override
public void onEndOfSpeech() {
}
#Override
public void onError(int error) {
}
#Override
public void onResults(Bundle results) {
ArrayList<String> matches = results
.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
Toast.makeText(c, matches.get(0), Toast.LENGTH_LONG).show();
speech.cancel();
//tried
//MainActivity m = new MainActivity();
//m.next();
//but got a Nullpointer Exception
}
#Override
public void onPartialResults(Bundle partialResults) {
}
#Override
public void onEvent(int eventType, Bundle params) {
}
}
You can store reference to the main activity in Jarvis object in a field:
// Sketch only (the "...." lines are elisions, not compilable code):
// hold a reference to the owning MainActivity so onResults can call
// back into its next() method instead of constructing a new activity.
class Jarvis {
....
private MainActivity m;
....
public Jarvis(MainActivity m) {
this.m = m;
}
....
public void onResults(Bundle results) {
....
m.next();
}
You can also send intents to the main activity as described here. This might be overkill in your case though.
I am trying to make an Android application which sets the camera's live feed as the wallpaper. I am almost done with the coding part. The thing is, when I click the set-wallpaper button, the live wallpaper chooser menu opens. But as soon as I select my app in it, the application crashes. I know it's kind of an existing app, but it's a client's demand.
WelcomeActivity.java
package com.wallpaper.transparenthighdefcamera;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
public class WelcomeActivity extends Activity {
ImageButton imgButt;
public WelcomeActivity()
{
}
protected void onCreate(Bundle bundle)
{
super.onCreate(bundle);
setContentView(R.layout.activity_main);
imgButt=(ImageButton) findViewById(R.id.setButton);
imgButt.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
Intent intent = new Intent();
if (android.os.Build.VERSION.SDK_INT >= 16)
{
intent.setAction("android.service.wallpaper.CHANGE_LIVE_WALLPAPER");
intent.putExtra("android.service.wallpaper.extra.LIVE_WALLPAPER_COMPONENT", new ComponentName("com.wallpaper.transparenthighdefcamera", "com.wallpaper.transparenthighdefcamera.TransparentWallpaperService"));
} else
intent.setAction("android.service.wallpaper.LIVE_WALLPAPER_CHOOSER");
intent.putExtra("android.service.wallpaper.extra.LIVE_WALLPAPER_COMPONENT", new ComponentName("com.wallpaper.transparenthighdefcamera", "com.wallpaper.transparenthighdefcamera.TransparentWallpaperService"));
//}
startActivity(intent);
}
});
}
}
TransparentWallpaperService.java:
package com.wallpaper.transparenthighdefcamera;
import com.wallpaper.transparenthighdefcamera.GenericaCamera;
import android.service.wallpaper.WallpaperService;
import android.view.SurfaceHolder;
// Wallpaper service that shows the live camera feed.
// NOTE(review): this looks like decompiled output (synthetic this$0
// field, package-private engine constructor) — kept byte-identical.
public class TransparentWallpaperService extends WallpaperService {
private class MyWallpaperEngine extends android.service.wallpaper.WallpaperService.Engine
{
// Camera wrapper that renders the preview onto the wallpaper surface.
GenericaCamera GC;
// Decompiler artifact: explicit reference to the enclosing service.
final TransparentWallpaperService this$0;
// Creates the camera helper once per engine, first releasing any
// previously existing helper so only one engine holds the camera.
public void onCreate(SurfaceHolder surfaceholder)
{
if (GC == null)
{
try
{
if (TransparentWallpaperService.existing != null)
{
TransparentWallpaperService.existing.destroyExisting();
}
}
catch (Exception exception) { } // best-effort cleanup; failures ignored
GC = new GenericaCamera(surfaceholder, getBaseContext());
TransparentWallpaperService.existing = GC;
}
super.onCreate(surfaceholder);
}
private MyWallpaperEngine()
{
super();
this$0 = TransparentWallpaperService.this;
}
// Synthetic-looking accessor constructor; simply delegates.
MyWallpaperEngine(MyWallpaperEngine mywallpaperengine)
{
this();
}
}
// Last camera helper created; static so a new engine can release the
// previous engine's camera before opening its own.
public static GenericaCamera existing;
public TransparentWallpaperService()
{
}
public void onCreate()
{
super.onCreate();
}
public android.service.wallpaper.WallpaperService.Engine onCreateEngine()
{
return new MyWallpaperEngine(null);
}
public void onDestroy()
{
super.onDestroy();
}
}
GenricaCamera.java:
package com.wallpaper.transparenthighdefcamera;
import java.util.Iterator;
import android.content.Context;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import android.widget.Toast;
// Owns the hardware camera and renders its preview into a SurfaceHolder.
// NOTE(review): appears to be decompiled output; kept byte-identical.
public class GenericaCamera implements android.view.SurfaceHolder.Callback {
private static boolean isPreviewRunning = false;
private Camera cameraDevice;
private SurfaceHolder cameraSurfaceHolder;
private Context context;
// Registers this object as the surface callback; the camera itself is
// opened lazily in surfaceCreated().
public GenericaCamera(SurfaceHolder surfaceholder, Context context1)
{
cameraDevice = null;
cameraSurfaceHolder = null;
context = context1;
cameraSurfaceHolder = surfaceholder;
// 3 == SURFACE_TYPE_PUSH_BUFFERS (deprecated constant) — presumably
// required for camera preview on old API levels; TODO confirm.
cameraSurfaceHolder.setType(3);
cameraSurfaceHolder.addCallback(this);
}
// Returns the largest supported preview size whose width/height do not
// exceed i/j (by area), or null if none fits.
private static android.hardware.Camera.Size getBestPreviewSize(int i, int j, android.hardware.Camera.Parameters parameters)
{
android.hardware.Camera.Size size = null;
Iterator iterator = parameters.getSupportedPreviewSizes().iterator();
do
{
android.hardware.Camera.Size size1;
do
{
if (!iterator.hasNext())
{
return size;
}
size1 = (android.hardware.Camera.Size)iterator.next();
} while (size1.width > i || size1.height > j); // skip sizes that don't fit
if (size == null)
{
size = size1;
} else
{
// Keep the candidate with the larger area.
int k = size.width * size.height;
if (size1.width * size1.height > k)
{
size = size1;
}
}
} while (true);
}
// Stops the preview and releases the camera so another engine (or app)
// can open it.
public void destroyExisting()
{
if (cameraDevice != null)
{
cameraDevice.stopPreview();
cameraDevice.setPreviewCallback(null);
cameraDevice.release();
cameraDevice = null;
}
isPreviewRunning = false;
}
// Reconfigures the preview size when the surface dimensions change.
public void surfaceChanged(SurfaceHolder surfaceholder, int i, int j, int k)
{
if (cameraDevice != null)
{
if (isPreviewRunning)
{
cameraDevice.stopPreview();
}
android.hardware.Camera.Parameters parameters = cameraDevice.getParameters();
android.hardware.Camera.Size size = getBestPreviewSize(j, k, parameters);
if (size != null)
{
parameters.setPreviewSize(size.width, size.height);
}
cameraDevice.setParameters(parameters);
cameraDevice.startPreview();
isPreviewRunning = true;
}
}
// Opens the camera (portrait-rotated) and starts the preview; shows a
// toast on any failure (e.g. camera already in use).
public void surfaceCreated(SurfaceHolder surfaceholder)
{
try
{
if (cameraDevice == null)
{
cameraDevice = Camera.open();
cameraDevice.setDisplayOrientation(90);
cameraDevice.setPreviewDisplay(cameraSurfaceHolder);
}
cameraDevice.startPreview();
return;
}
catch (Exception exception)
{
Toast.makeText(context, "Can't create preview!", 1).show();
exception.printStackTrace();
return;
}
}
// Releases the camera when the surface goes away.
public void surfaceDestroyed(SurfaceHolder surfaceholder)
{
if (cameraDevice == null)
{
return;
} else
{
destroyExisting();
return;
}
}
}
AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.wallpaper.transparenthighdefcamera"
    android:versionCode="1"
    android:versionName="1.0" >
    <uses-sdk
        android:minSdkVersion="8"
        android:targetSdkVersion="18" />
    <supports-screens android:anyDensity="true" android:smallScreens="true" android:normalScreens="true" android:largeScreens="true" android:xlargeScreens="true" />
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.SET_WALLPAPER" />
    <uses-feature android:name="android.hardware.camera" />
    <uses-feature android:name="android.software.live_wallpaper" android:required="true" />
    <application
        android:allowBackup="true"
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@style/AppTheme" >
        <activity
            android:name="com.wallpaper.transparenthighdefcamera.WelcomeActivity"
            android:label="@string/app_name" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
        <service android:name=".services.TransparentCameraHelperService" />
        <!-- FIX: TransparentWallpaperService is declared in the package
             com.wallpaper.transparenthighdefcamera (see the Java source),
             not in a ".services" subpackage; the wrong android:name makes
             the system fail to instantiate the service, crashing the app
             the moment the wallpaper is selected. Also restored '@'
             resource references (were garbled to '#'). -->
        <service android:name=".TransparentWallpaperService" android:permission="android.permission.BIND_WALLPAPER" android:enabled="true">
            <intent-filter>
                <action android:name="android.service.wallpaper.WallpaperService" />
            </intent-filter>
            <meta-data android:name="android.service.wallpaper" android:resource="@xml/wallpaper" />
        </service>
    </application>
</manifest>
I make wallpapers for Android and I want the user to be able to choose options. The options menu is shown, but there is a problem: when I click any option and go back to the wallpaper screen, it doesn't update with the new options. Why?
My code WallpaperService:
// Engine constructor: caches the service context, subscribes to the
// shared-preferences file, and kicks off the first draw.
public MyWallpaperEngine(WallpaperService ws)
{
    context = ws;
    prefs = LiveWallpaperService.this.getSharedPreferences(SHARED_PREFS_NAME, 0);
    OnSharedPreferenceChangeListener listener = new SharedPreferences.OnSharedPreferenceChangeListener() {
        public void onSharedPreferenceChanged(SharedPreferences prefs, String key)
        {
            // Flip the repeat flag whenever the BACKREPEAT setting changes
            // (null-safe: equals on the literal also rejects a null key).
            if ("BACKREPEAT".equals(key)) {
                BACKREPEAT = !BACKREPEAT;
            }
        }
    };
    prefs.registerOnSharedPreferenceChangeListener(listener);
    handler.post(drawRunner);
}
upd:
I have done it as written in the example, but the result hasn't changed.
LiveWallpaperService code:
package com.samples;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import android.view.SurfaceHolder;
public class LiveWallpaperService extends WallpaperService
{
public static final String SHARED_PREFS_NAME = "leoSettings01";
#Override
public void onCreate() {
super.onCreate();
}
#Override
public void onDestroy() {
super.onDestroy();
}
#Override
public Engine onCreateEngine() {
return new MyWallpaperEngine();
}
private class MyWallpaperEngine extends Engine implements
SharedPreferences.OnSharedPreferenceChangeListener {
private final Handler handler = new Handler();
int draw_x = 0;
int draw_y = 0;
//...
boolean BACKREPEAT = false;
private final Runnable drawRunner = new Runnable() {
#Override
public void run()
{
draw();
}
};
private boolean visible = true;
private SharedPreferences prefs;
MyWallpaperEngine()
{
prefs = LiveWallpaperService.this.getSharedPreferences(SHARED_PREFS_NAME, 0);
prefs.registerOnSharedPreferenceChangeListener(this);
onSharedPreferenceChanged(prefs, null);
handler.post(drawRunner);
}
public void onSharedPreferenceChanged(SharedPreferences prefs, String key) {
if(key != null)
{
Log.v("key:", key); //no message!
if(key.equals("BACKREPEAT")){
if(BACKREPEAT)
BACKREPEAT = false;
else
BACKREPEAT = true;
}
}
}
#Override
public void onVisibilityChanged(boolean visible) {
this.visible = visible;
if (visible) {
handler.post(drawRunner);
} else {
handler.removeCallbacks(drawRunner);
}
}
#Override
public void onSurfaceDestroyed(SurfaceHolder holder) {
super.onSurfaceDestroyed(holder);
this.visible = false;
handler.removeCallbacks(drawRunner);
}
private void draw()
{
SurfaceHolder holder = getSurfaceHolder();
Canvas canvas = null;
try
{
canvas = holder.lockCanvas();
//...draw draw draw
}
finally
{
if (canvas != null)
holder.unlockCanvasAndPost(canvas);
}
handler.removeCallbacks(drawRunner);
if (visible)
{
handler.postDelayed(drawRunner, DELAY);
}
}
}
}
prefs.xml:
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
<PreferenceCategory android:title="General">
<CheckBoxPreference
android:title="Animation repeat"
android:key="BACKREPEAT"
android:defaultValue="false"
/>
</PreferenceCategory>
</PreferenceScreen>
Your wallpaper will never restart after coming back from settings. The only method invoked will be your Engine's onVisibilityChanged. If you correctly implement SharedPreferences.OnSharedPreferenceChangeListener on your Engine, then onSharedPreferenceChanged should be called too.
Please check if you implemented OnSharedPreferenceChangeListener exactly this way:
http://developer.android.com/resources/samples/CubeLiveWallpaper/src/com/example/android/livecubes/cube2/CubeWallpaper2.html
If you did and it is still not working, please post your entire WallpaperService and settings preference activity code here.