I can't solve this problem. I want to write a Flutter plugin for connecting to scales via Bluetooth, but the scale's SDK is designed to be used from an Activity. I looked into how to use an Activity inside a Flutter plugin and tried ActivityAware, but I can't get the SDK to run: I can't pass values from the class that extends the SDK's Activity into an EventChannel, and I can't stream them back to Flutter.
This is the Logcat error:
2022-09-03 01:35:18.966 7973-7973/com.example.releep_scale_connect_example E/EventChannel#scan_releep_scale: Failed to open event stream
java.lang.IllegalStateException: System services not available to Activities before onCreate()
at android.app.Activity.getSystemService(Activity.java:6916)
at aicare.net.cn.iweightlibrary.bleprofile.BleProfileServiceReadyActivity.isBLEEnabled(BleProfileServiceReadyActivity.java:299)
at aicare.net.cn.iweightlibrary.bleprofile.BleProfileServiceReadyActivity.startScan(BleProfileServiceReadyActivity.java:329)
at com.example.releep_scale_connect.ReleepScaleConnectPlugin.onListen(ReleepScaleConnectPlugin.java:156)
at io.flutter.plugin.common.EventChannel$IncomingStreamRequestHandler.onListen(EventChannel.java:218)
at io.flutter.plugin.common.EventChannel$IncomingStreamRequestHandler.onMessage(EventChannel.java:197)
at io.flutter.embedding.engine.dart.DartMessenger.invokeHandler(DartMessenger.java:178)
at io.flutter.embedding.engine.dart.DartMessenger.lambda$handleMessageFromDart$0$DartMessenger(DartMessenger.java:206)
at io.flutter.embedding.engine.dart.-$$Lambda$DartMessenger$6ZD1MYkhaLxyPjtoFDxe45u43DI.run(Unknown Source:12)
at android.os.Handler.handleCallback(Handler.java:938)
at android.os.Handler.dispatchMessage(Handler.java:99)
at android.os.Looper.loop(Looper.java:236)
at android.app.ActivityThread.main(ActivityThread.java:8051)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:620)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1011)
2022-09-03 01:35:18.978 7973-7973/com.example.releep_scale_connect_example W/Looper: PerfMonitor longMsg : seq=135 plan=01:35:17.167 late=0ms wall=1800ms running=24ms runnable=1ms h=android.os.Handler c=io.flutter.embedding.engine.dart.-$$Lambda$DartMessenger$6ZD1MYkhaLxyPjtoFDxe45u43DI procState=2
This is my plugin (ReleepScaleConnectPlugin.java). I want to wrap this SDK in Flutter plugin format.
The SDK on GitHub: https://github.com/evanwork1234/AiFitSDK
public class ReleepScaleConnectPlugin extends BleProfileServiceReadyActivity implements FlutterPlugin, EventChannel.StreamHandler, MethodCallHandler, ActivityAware {
private FlutterActivity activity;
private MethodChannel channel;
private EventChannel streamChannel;
private DeviceDialog devicesDialog;
private BinaryMessenger binaryMessenger;
private String[] permissionArray = new String[] {
Manifest.permission.BLUETOOTH,
Manifest.permission.BLUETOOTH_ADMIN,
Manifest.permission.ACCESS_COARSE_LOCATION,
Manifest.permission.ACCESS_FINE_LOCATION,
Manifest.permission.INTERNET,
Manifest.permission.ACCESS_NETWORK_STATE
};
@Override
public void onAttachedToEngine(@NonNull FlutterPluginBinding flutterPluginBinding) {
binaryMessenger = flutterPluginBinding.getBinaryMessenger();
}
@Override
public void onMethodCall(@NonNull MethodCall call, @NonNull Result result) {
if (call.method.equals("getPlatformVersion")) {
result.success("Android " + android.os.Build.VERSION.RELEASE);
} else {
result.notImplemented();
}
}
@Override
public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
channel.setMethodCallHandler(null);
}
private ArrayList<String> listVal = new ArrayList<>();
private Gson gson = new Gson();
@Override
public void onListen(Object arguments, EventChannel.EventSink events) {
if (arguments.equals("scan")) {
// devicesDialog.startScan();
startScan();
String json = gson.toJson(listVal);
events.success(json);
}
}
@Override
public void onCancel(Object arguments) {
}
@Override
protected void onError(String s, int i) {
L.e("TAG", "Message = " + s + " errCode = " + i);
// showInfo(getString(R.string.state_error, errMsg, errCode), true);
}
@Override
protected void onGetWeightData(WeightData weightData) {
}
@Override
protected void onGetResult(int i, String s) {
}
@Override
protected void onServiceBinded(WBYService.WBYBinder wbyBinder) {
}
@Override
protected void onServiceUnbinded() {
}
@Override
protected void getAicareDevice(BroadData broadData) {
if (broadData != null) {
L.e("TAG", broadData.toString());
listVal.add(broadData.toString());
}
}
private void startLeScan() {
startScan();
}
private void stopLeScan() {
stopScan();
}
@Override
public void onAttachedToActivity(@NonNull @NotNull ActivityPluginBinding binding) {
L.e("TAG", "onAttachedToActivity");
activity = (FlutterActivity) binding.getActivity();
AiFitSDK.getInstance().init(activity);
channel = new MethodChannel(binaryMessenger, "releep_scale_connect");
channel.setMethodCallHandler(this);
streamChannel = new EventChannel(binaryMessenger, "scan_releep_scale");
streamChannel.setStreamHandler(this);
boolean backBoolean = PermissionUtils.checkPermissionArray(activity.getContext(), permissionArray, 3);
// initPermissions();
// Judge whether Bluetooth is on, if you need to change the style, you can do it yourself
if (!isBLEEnabled()) {
showBLEDialog();
}
//startScan();
// devicesDialog = new DeviceDialog(activity.getContext(), this);
}
@Override
public void onDetachedFromActivityForConfigChanges() {
L.e("TAG", "onDetachedFromActivityForConfigChanges");
}
@Override
public void onReattachedToActivityForConfigChanges(@NonNull @NotNull ActivityPluginBinding binding) {
L.e("TAG", "onReattachedToActivityForConfigChanges");
}
@Override
public void onDetachedFromActivity() {
L.e("TAG", "onDetachedFromActivity");
}
}
I want these overridden methods to be able to pass values back to Flutter:
@Override
protected void onGetWeightData(WeightData weightData) {
}
@Override
protected void onGetResult(int i, String s) {
}
@Override
protected void onGetFatData(boolean b, BodyFatData bodyFatData) {
}
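What I have in mind is something like the sketch below: keep the EventSink that onListen hands over and emit from the SDK callbacks. This is only a sketch; it assumes the SDK fires these callbacks on the main thread (otherwise the success() calls would need to be posted to the main looper), and the field name scanSink is a placeholder.

private EventChannel.EventSink scanSink;

@Override
public void onListen(Object arguments, EventChannel.EventSink events) {
    // Keep the sink so the SDK callbacks below can emit into the Dart stream.
    scanSink = events;
}

@Override
public void onCancel(Object arguments) {
    scanSink = null;
}

@Override
protected void onGetWeightData(WeightData weightData) {
    if (scanSink != null) {
        // Serialize the SDK object with Gson and push it to the Dart side.
        scanSink.success(gson.toJson(weightData));
    }
}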
I am trying to create a video-calling Android application using the Agora SDK, but I'm running into a problem: I want to capture the video frames and process them before displaying them.
I am using lib-raw-data from the API Examples to capture the raw video frames. I have followed the steps from the documentation (https://docs.agora.io/en/Video/raw_data_video_android?platform=Android) and checked the examples as well. However, I cannot seem to capture any frames in the onRenderVideoFrame and onCaptureVideoFrame methods.
The video calling works perfectly, but the callbacks never fire; I don't get anything in onRenderVideoFrame or onCaptureVideoFrame.
Below is the code for the Activity:
public class MainActivity extends AppCompatActivity implements MediaDataVideoObserver,
MediaDataAudioObserver {
private static final String TAG = MainActivity.class.getSimpleName();
private static final int PERMISSION_REQ_ID = 22;
private static final String[] REQUESTED_PERMISSIONS = {
Manifest.permission.RECORD_AUDIO,
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
};
private RtcEngine mRtcEngine;
private boolean mCallEnd;
private boolean mMuted;
private FrameLayout mLocalContainer;
private RelativeLayout mRemoteContainer;
private VideoCanvas mLocalVideo;
private VideoCanvas mRemoteVideo;
private ImageView mCallBtn;
private ImageView mMuteBtn;
private ImageView mSwitchCameraBtn;
private MediaDataObserverPlugin mediaDataObserverPlugin;
private final IRtcEngineEventHandler mRtcHandler = new IRtcEngineEventHandler() {
@Override
public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
super.onJoinChannelSuccess(channel, uid, elapsed);
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i("Agora", "Join channel success, uid: " + (uid & 0xFFFFFFFFL));
}
});
}
@Override
public void onFirstRemoteVideoFrame(int uid, int width, int height, int elapsed) {
super.onFirstRemoteVideoFrame(uid, width, height, elapsed);
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i("Agora", "First remote video decoded, uid: " + (uid & 0xFFFFFFFFL));
setupRemoteVideo(uid);
}
});
}
@Override
public void onUserOffline(int uid, int reason) {
super.onUserOffline(uid, reason);
runOnUiThread(new Runnable() {
@Override
public void run() {
if (mediaDataObserverPlugin != null) {
mediaDataObserverPlugin.removeDecodeBuffer(uid);
}
Log.i("Agora", "User offline, uid: " + (uid & 0xFFFFFFFFL));
onRemoteUserLeft(uid);
}
});
}
@Override
public void onUserJoined(int uid, int elapsed) {
super.onUserJoined(uid, elapsed);
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i("agora", "Remote user joined, uid: " + (uid & 0xFFFFFFFFL));
setupRemoteVideo(uid);
}
});
}
};
private void onRemoteUserLeft(int uid) {
if (mRemoteVideo != null && mRemoteVideo.uid == uid) {
removeFromParent(mRemoteVideo);
// Destroys remote view
mRemoteVideo = null;
}
}
private ViewGroup removeFromParent(VideoCanvas canvas) {
if (canvas != null) {
ViewParent parent = canvas.view.getParent();
if (parent != null) {
ViewGroup group = (ViewGroup) parent;
group.removeView(canvas.view);
return group;
}
}
return null;
}
private void showButtons(boolean show) {
int visibility = show ? View.VISIBLE : View.GONE;
mMuteBtn.setVisibility(visibility);
mSwitchCameraBtn.setVisibility(visibility);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initUI();
Log.e(TAG, "Media Data Observer registered");
// Ask for permissions at runtime.
if (checkSelfPermission(REQUESTED_PERMISSIONS[0], PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[1], PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[2], PERMISSION_REQ_ID)) {
initEngineAndJoinChannel();
}
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onStop() {
super.onStop();
if (mediaDataObserverPlugin != null) {
mediaDataObserverPlugin.removeVideoObserver(this);
mediaDataObserverPlugin.removeAllBuffer();
}
}
private void initUI() {
mLocalContainer = findViewById(R.id.local_video_view_container);
mRemoteContainer = findViewById(R.id.remote_video_view_container);
mCallBtn = findViewById(R.id.btn_call);
mMuteBtn = findViewById(R.id.btn_mute);
mSwitchCameraBtn = findViewById(R.id.btn_switch_camera);
}
private boolean checkSelfPermission(String permission, int requestCode) {
if (ContextCompat.checkSelfPermission(this, permission) !=
PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, REQUESTED_PERMISSIONS, requestCode);
return false;
}
return true;
}
private void initEngineAndJoinChannel() {
initializeEngine();
setupVideoConfig();
setupLocalVideo();
joinChannel();
}
private void joinChannel() {
String token = "<access token>";
mediaDataObserverPlugin = MediaDataObserverPlugin.the();
MediaPreProcessing.setCallback(mediaDataObserverPlugin);
MediaPreProcessing.setVideoCaptureByteBuffer(mediaDataObserverPlugin.byteBufferCapture);
mediaDataObserverPlugin.addVideoObserver(this);
if (TextUtils.isEmpty(token)) {
token = null;
}
mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_COMMUNICATION);
mRtcEngine.joinChannel(token, "channel1", "", 0);
}
private void setupRemoteVideo(int uid) {
ViewGroup parent = mRemoteContainer;
if (mLocalVideo != null && parent.indexOfChild(mLocalVideo.view) > -1) {
parent = mLocalContainer;
}
if (mRemoteVideo != null) {
return;
}
if (mediaDataObserverPlugin != null) {
mediaDataObserverPlugin.addDecodeBuffer(uid);
}
SurfaceView view = RtcEngine.CreateRendererView(getBaseContext());
view.setZOrderMediaOverlay(parent == mLocalContainer);
parent.addView(view);
mRemoteVideo = new VideoCanvas(view, VideoCanvas.RENDER_MODE_HIDDEN, uid);
// Initializes the video view of a remote user.
mRtcEngine.setupRemoteVideo(mRemoteVideo);
}
private void setupLocalVideo() {
SurfaceView view = RtcEngine.CreateRendererView(getBaseContext());
view.setZOrderMediaOverlay(true);
mLocalContainer.addView(view);
// Initializes the local video view.
// RENDER_MODE_HIDDEN: Uniformly scale the video until it fills the visible boundaries. One dimension of the video may have clipped contents.
VideoCanvas localVideoCanvas = new VideoCanvas(view, VideoCanvas.RENDER_MODE_HIDDEN, 0);
mRtcEngine.setupLocalVideo(localVideoCanvas);
//mRtcEngine.setVideoSource(new CustomVideoSource());
}
private void setupVideoConfig() {
// In simple use cases, we only need to enable video capturing
// and rendering once at the initialization step.
// Note: audio recording and playing is enabled by default.
mRtcEngine.enableVideo();
mRtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
VideoEncoderConfiguration.VD_640x360,
VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15,
VideoEncoderConfiguration.STANDARD_BITRATE,
VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_FIXED_PORTRAIT));
}
private void initializeEngine() {
try {
mRtcEngine = RtcEngine.create(getBaseContext(), getString(R.string.agora_app_id), mRtcHandler);
} catch (Exception e) {
Log.e("Agora", Log.getStackTraceString(e));
throw new RuntimeException("Check rtc sdk init fatal error - " + Log.getStackTraceString(e));
}
}
private void startCall() {
setupLocalVideo();
joinChannel();
}
private void endCall() {
removeFromParent(mLocalVideo);
mLocalVideo = null;
removeFromParent(mRemoteVideo);
mRemoteVideo = null;
leaveChannel();
}
private void leaveChannel() {
if (mediaDataObserverPlugin != null) {
mediaDataObserverPlugin.removeVideoObserver(this);
mediaDataObserverPlugin.removeAllBuffer();
}
mRtcEngine.leaveChannel();
}
public void onLocalAudioMuteClicked(View view) {
mMuted = !mMuted;
// Stops/Resumes sending the local audio stream.
mRtcEngine.muteLocalAudioStream(mMuted);
int res = mMuted ? R.drawable.btn_mute : R.drawable.btn_unmute;
mMuteBtn.setImageResource(res);
}
public void onSwitchCameraClicked(View view) {
// Switches between front and rear cameras.
mRtcEngine.switchCamera();
}
public void onCallClicked(View view) {
if (mCallEnd) {
startCall();
mCallEnd = false;
mCallBtn.setImageResource(R.drawable.btn_endcall);
} else {
endCall();
mCallEnd = true;
mCallBtn.setImageResource(R.drawable.btn_startcall);
}
showButtons(!mCallEnd);
}
@Override
protected void onDestroy() {
super.onDestroy();
if (!mCallEnd) {
leaveChannel();
}
/*
Destroys the RtcEngine instance and releases all resources used by the Agora SDK.
This method is useful for apps that occasionally make voice or video calls,
to free up resources for other operations when not making calls.
*/
RtcEngine.destroy();
}
@Override
public void onRecordAudioFrame(byte[] data, int audioFrameType, int samples, int bytesPerSample, int channels, int samplesPerSec, long renderTimeMs, int bufferLength) {
}
@Override
public void onPlaybackAudioFrame(byte[] data, int audioFrameType, int samples, int bytesPerSample, int channels, int samplesPerSec, long renderTimeMs, int bufferLength) {
}
@Override
public void onPlaybackAudioFrameBeforeMixing(int uid, byte[] data, int audioFrameType, int samples, int bytesPerSample, int channels, int samplesPerSec, long renderTimeMs, int bufferLength) {
}
@Override
public void onMixedAudioFrame(byte[] data, int audioFrameType, int samples, int bytesPerSample, int channels, int samplesPerSec, long renderTimeMs, int bufferLength) {
}
@Override
public void onCaptureVideoFrame(byte[] data, int frameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs) {
Log.d(TAG, "onCaptureVideoFrame called");
}
@Override
public void onRenderVideoFrame(int uid, byte[] data, int frameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs) {
Log.d(TAG, "onRenderVideoFrame called");
}
@Override
public void onPreEncodeVideoFrame(byte[] data, int frameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs) {
Log.d(TAG, "onPreEncodeVideoFrame0");
}
}
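For context, this is roughly the processing I want to drop into onCaptureVideoFrame once it actually fires. It is only a sketch: it assumes the buffer is YUV with the luma plane first and that lib-raw-data copies the modified array back to the SDK (which is how the API Examples modify frames); inverting the luma plane stands in for my real processing.

@Override
public void onCaptureVideoFrame(byte[] data, int frameType, int width, int height,
        int bufferLength, int yStride, int uStride, int vStride,
        int rotation, long renderTimeMs) {
    // Invert the luma plane in place; the modified buffer is pushed back
    // to the SDK, so the change would be visible in the sent video.
    int lumaSize = Math.min(width * height, data.length);
    for (int i = 0; i < lumaSize; i++) {
        data[i] = (byte) (255 - (data[i] & 0xFF));
    }
}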
Not sure what I'm doing wrong. Any help would be appreciated. Thanks!
Gradle
plugins {
id 'com.android.application'
}
android {
compileSdkVersion 30
buildToolsVersion "30.0.2"
ndkVersion '21.4.7075529'
defaultConfig {
applicationId "com.example.agoratest"
minSdkVersion 26
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.google.android.material:material:1.2.1'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
implementation 'io.agora.rtc:full-sdk:3.3.1'
implementation project(path: ':lib-raw-data')
}
I have implemented the MVP pattern in my app, and I'm using a WeakReference to store the View's reference in my Presenter. But my Fragments are still not being reclaimed by the GC after being destroyed. Below is a screenshot of the problem. Any idea what is causing this and how to fix it?
Below is the code for my Presenter:
public class ProductDetailPresenter implements ProductDetailContract.Presenter {
private final WeakReference<ProductDetailContract.View> view;
private CategoriesDataSource repo;
public ProductDetailPresenter(ProductDetailContract.View view, CategoriesDataSource repo) {
this.view = new WeakReference<>(view);
this.repo = repo;
view.setPresenter(this);
}
@Override
public void start() {
}
@Override
public void submitRating(final Product product, final float mRating) {
final ProductDetailContract.View view = ProductDetailPresenter.this.view.get();
if (view != null) {
repo.submitRating(product.getId(), mRating, true, new CategoriesDataSource.SubmitRatingCallback() {
@Override
public void onRatingSubmitted() {
product.setRating(mRating);
product.setRated(true);
product.setUpdatedAt(new Date(System.currentTimeMillis()));
repo.updateProductInDB(product);
if (!view.isActive()) return;
view.onRatingSubmitted(true, mRating);
}
@Override
public void onError(Throwable throwable) {
if (!view.isActive()) return;
view.onRatingSubmitted(false, 0);
}
});
}
}
@Override
public void onRateKarenClicked() {
ProductDetailContract.View view = this.view.get();
if (view != null) {
view.openDialog();
}
}
@Override
public void onAbhiKhareediyeClicked(Product product) {
EventBus.getDefault().post(
new ProductDetailContract.ContractEventMessages(
ProductDetailContract.ContractEventMessages.EVENT_START_QUANTITY_SCREEN, product));
}
}
This is the problem:
@Override
public void submitRating(final Product product, final float mRating) {
final ProductDetailContract.View view = ProductDetailPresenter.this.view.get(); // <-- this is bad
You have a final reference that gets captured by the callback you pass to the repo, which keeps the View alive for as long as the request runs. Delete the whole line; you don't need it. Instead, call view.get() inside onRatingSubmitted and onError.
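A sketch of the fixed method, resolving the weak reference inside each callback instead of capturing a strong reference up front:

@Override
public void submitRating(final Product product, final float mRating) {
    repo.submitRating(product.getId(), mRating, true, new CategoriesDataSource.SubmitRatingCallback() {
        @Override
        public void onRatingSubmitted() {
            product.setRating(mRating);
            product.setRated(true);
            product.setUpdatedAt(new Date(System.currentTimeMillis()));
            repo.updateProductInDB(product);
            // Resolve the weak reference only when the callback fires, so the
            // anonymous class never pins the Fragment while the request runs.
            ProductDetailContract.View view = ProductDetailPresenter.this.view.get();
            if (view == null || !view.isActive()) return;
            view.onRatingSubmitted(true, mRating);
        }

        @Override
        public void onError(Throwable throwable) {
            ProductDetailContract.View view = ProductDetailPresenter.this.view.get();
            if (view == null || !view.isActive()) return;
            view.onRatingSubmitted(false, 0);
        }
    });
}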
I am implementing an app introduction and waiver that should appear before the user can access the MainActivity of my Android app. If the user has not accepted the waiver or gone through the app introduction, then my IntroNavigator kicks them back to those activities.
How can I rxify my redirectIfNecessary() method in a more functional manner, instead of the imperative approach I implemented below?
IntroNavigatorImpl.java
public class IntroNavigatorImpl implements IntroNavigator {
WeakReference<Activity> activityWeakReference;
CloudPrefsRepo cloudPrefsRepo;
public IntroNavigatorImpl(Activity activity, CloudPrefsRepo cloudPrefsRepo) {
this.activityWeakReference = new WeakReference<>(activity);
this.cloudPrefsRepo = cloudPrefsRepo;
}
@Override
public void redirectIfNecessary() {
final boolean shouldShowAppIntro = cloudPrefsRepo.shouldShowAppIntro()
.toObservable().toBlocking().first();
final boolean shouldShowWaiver = cloudPrefsRepo.shouldShowWaiver()
.toObservable().toBlocking().first();
if (shouldShowAppIntro) {
showAppIntro();
finishActivity();
} else if (shouldShowWaiver) {
showWaiver();
finishActivity();
} else {
//do nothing
}
}
@Override
public void showWaiver() {
//launch waiver activity
}
@Override
public void showAppIntro() {
//launch app intro activity
}
public void finishActivity() {
if (activityWeakReference.get() != null) {
activityWeakReference.get().finish();
}
}
}
CloudPrefsRepo.java
public interface CloudPrefsRepo {
/**
* Whether to show the app intro.
*/
Single<Boolean> shouldShowAppIntro();
/**
* Whether to show the waiver. If the user has already
* accepted this waiver, then it shouldn't be shown.
*/
Single<Boolean> shouldShowWaiver();
}
Edit, based on a comment:
It's not the prettiest (I'm leaning towards there not being a good reactive way to do this):
final Action0 showInfoAction = new Action0() {
@Override
public void call() {
showAppIntro();
finishActivity();
}
};
final Action0 showWaiverAction = new Action0() {
@Override
public void call() {
showWaiver();
finishActivity();
}
};
final Action0 blankAction = new Action0() {
@Override
public void call() {
}
};
Observable.zip(shouldShowInfo, shouldShowWaiver, new Func2<Boolean, Boolean, Action0>() {
@Override
public Action0 call(Boolean shouldShowInfo, Boolean shouldShowWaiver) {
if (shouldShowInfo) {
return showInfoAction;
} else if (shouldShowWaiver) {
return showWaiverAction;
} else {
return blankAction;
}
}
}).subscribe(new Action1<Action0>() {
@Override
public void call(Action0 action0) {
action0.call();
}
});
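A slightly tighter variant of the same idea, branching in the subscriber instead of returning Action0s (still RxJava 1.x; android.util.Pair just carries the two flags):

Observable.zip(shouldShowInfo, shouldShowWaiver,
        new Func2<Boolean, Boolean, Pair<Boolean, Boolean>>() {
            @Override
            public Pair<Boolean, Boolean> call(Boolean showInfo, Boolean showWaiver) {
                return Pair.create(showInfo, showWaiver);
            }
        })
        .subscribe(new Action1<Pair<Boolean, Boolean>>() {
            @Override
            public void call(Pair<Boolean, Boolean> flags) {
                if (flags.first) {
                    showAppIntro();
                    finishActivity();
                } else if (flags.second) {
                    showWaiver();
                    finishActivity();
                }
            }
        });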
I have some trouble with the OnQBVideoChatListener in QuickBlox: the two methods onOpponentVideoDataReceive and onOpponentAudioDataReceive are never called. Here is how I implemented the cameraView and the opponentView; it's basically the same as the sample app given here. I have tried to run the sample video chat app on my phone and on a tablet, but the same issue occurs and only the cameraView gets updated. The opponent view is always black. Has anyone experienced the same trouble and found a solution?
Thanks a bunch!
private void initViews() {
// Setup UI
opponentView = (OpponentGlSurfaceView) findViewById(R.id.opponentView);
cameraView = (CameraView) findViewById(R.id.cameraView);
cameraView.setCameraFrameProcess(true);
// Set VideoChat listener
cameraView.setQBVideoChatListener(qbVideoChatListener);
// Set Camera init callback
cameraView.setFPS(6);
cameraView.setOnCameraViewListener(new OnCameraViewListener() {
@Override
public void onCameraSupportedPreviewSizes(List<Camera.Size> supportedPreviewSizes) {
Camera.Size firstFrameSize = supportedPreviewSizes.get(0);
Camera.Size lastFrameSize = supportedPreviewSizes.get(supportedPreviewSizes.size() - 1);
cameraView.setFrameSize(firstFrameSize.width > lastFrameSize.width ? lastFrameSize : firstFrameSize);
}
});
// VideoChat settings
videoChatConfig = (VideoChatConfig) GlobalVar.getObject(tag);
try {
QBVideoChatController.getInstance().setQBVideoChatListener((QBUser)GlobalVar.getObject(GlobalVar.CURRENT_USER_KEY), qbVideoChatListener);
} catch (XMPPException e) {
e.printStackTrace();
}
}
OnQBVideoChatListener qbVideoChatListener = new OnQBVideoChatListener() {
@Override
public void onCameraDataReceive(byte[] videoData) {
if (videoChatConfig.getCallType() != CallType.VIDEO_AUDIO) {
//...
}
else{
QBVideoChatController.getInstance().sendVideo(videoData);
Log.i(tag,"videoData sent!"); // THIS IS EXECUTED
}
}
@Override
public void onMicrophoneDataReceive(byte[] audioData) {
QBVideoChatController.getInstance().sendAudio(audioData);
Log.i(tag,"AudioData sent!"); //SO IS THIS
}
@Override
public void onOpponentVideoDataReceive(byte[] videoData) {
Log.i(tag,"received img from opponent"); //NOT CALLED
opponentView.loadOpponentImage(videoData);
}
@Override
public void onOpponentAudioDataReceive(byte[] audioData) {
Log.i(tag,"received Audio from opponent"); //NOT CALLED
QBVideoChatController.getInstance().playAudio(audioData);
}
@Override
public void onProgress(boolean progress) {
}
@Override
public void onVideoChatStateChange(CallState callState, VideoChatConfig chat) {
//... STUFF
}
};