diff --git a/build.gradle b/build.gradle index 1a5ee412..88b507f5 100644 --- a/build.gradle +++ b/build.gradle @@ -1,11 +1,11 @@ buildscript { - ext.kotlin_version = '1.3.61' + ext.kotlin_version = '1.3.72' repositories { google() jcenter() } dependencies { - classpath 'com.android.tools.build:gradle:3.5.3' + classpath 'com.android.tools.build:gradle:4.1.1' classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" } @@ -44,7 +44,7 @@ allprojects { version_number = getVersionName() group_info = "haven" buildToolsVersion = "29.0.2" - minSdkVersion = 16 + minSdkVersion = 21 targetSdkVersion = 29 } @@ -59,7 +59,7 @@ allprojects { android { compileSdkVersion 29 - buildToolsVersion = '29.0.2' + buildToolsVersion = '29.0.3' packagingOptions { exclude 'META-INF/LICENSE.txt' @@ -82,7 +82,7 @@ android { versionCode 2001 versionName getVersionName() archivesBaseName = "Haven-$versionName" - minSdkVersion 16 + minSdkVersion 21 targetSdkVersion 29 compileOptions { sourceCompatibility JavaVersion.VERSION_1_8 @@ -122,7 +122,7 @@ android { sourceCompatibility JavaVersion.VERSION_1_8 targetCompatibility JavaVersion.VERSION_1_8 } - + kotlinOptions { jvmTarget = "1.8" } @@ -142,13 +142,13 @@ configurations { } dependencies { - implementation 'androidx.appcompat:appcompat:1.1.0' - implementation 'androidx.constraintlayout:constraintlayout:2.0.0-beta4' + implementation 'androidx.appcompat:appcompat:1.2.0' + implementation 'androidx.constraintlayout:constraintlayout:2.1.0-alpha1' implementation "androidx.coordinatorlayout:coordinatorlayout:1.1.0" - implementation 'androidx.preference:preference:1.1.0' + implementation 'androidx.preference:preference:1.1.1' implementation 'androidx.multidex:multidex:2.0.1' implementation 'androidx.annotation:annotation:1.1.0' - implementation 'com.google.android.material:material:1.2.0-alpha03' + implementation 'com.google.android.material:material:1.3.0-alpha04' implementation 'com.google.code.gson:gson:2.8.6' implementation 
'com.wdullaer:materialdatetimepicker:4.2.3' implementation 'com.github.guardianproject:signal-cli-android:v0.6.0-android-beta-1' @@ -169,8 +169,6 @@ dependencies { implementation 'com.github.halilozercan:BetterVideoPlayer:1.1.0' implementation 'com.github.ybq:Android-SpinKit:1.4.0' implementation 'io.github.silvaren:easyrs:0.5.3' - implementation 'org.jcodec:jcodec:0.2.5' - implementation 'org.jcodec:jcodec-android:0.2.5' implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version" // Room @@ -178,20 +176,31 @@ dependencies { kapt "android.arch.persistence.room:compiler:2.1.0" implementation "android.arch.lifecycle:runtime:2.1.0" implementation "android.arch.lifecycle:extensions:2.1.0" - - testImplementation 'junit:junit:4.13' - androidTestImplementation 'androidx.test:runner:1.2.0' - androidTestImplementation 'androidx.test:core:1.2.0' - androidTestImplementation 'androidx.test:rules:1.2.0' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0' - androidTestImplementation 'androidx.test.espresso:espresso-intents:3.2.0' + implementation "androidx.lifecycle:lifecycle-common-java8:2.2.0" + + testImplementation 'junit:junit:4.13.1' + androidTestImplementation 'androidx.test:runner:1.3.0' + androidTestImplementation 'androidx.test:core:1.3.0' + androidTestImplementation 'androidx.test:rules:1.3.0' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0' + androidTestImplementation 'androidx.test.espresso:espresso-intents:3.3.0' androidTestImplementation "android.arch.persistence.room:testing:2.1.0" // android-job implementation 'com.evernote:android-job:1.4.2' - // new camera view: https://github.com/natario1/CameraView - implementation 'com.otaliastudios:cameraview:2.4.0' + // CameraX core library using camera2 implementation + implementation "androidx.camera:camera-camera2:1.0.0-beta12" + // CameraX Lifecycle Library + implementation "androidx.camera:camera-lifecycle:1.0.0-beta12" + // CameraX View class + implementation 
"androidx.camera:camera-view:1.0.0-alpha19" + // coroutines + implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core-common:1.3.7' + implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.3.7' + implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-jdk8:1.3.9' + implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-android:1.3.9' + debugImplementation 'com.squareup.leakcanary:leakcanary-android:2.2' } diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 6c70f8ba..14c1226d 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,8 +1,8 @@ -#Sat Dec 07 05:36:14 EST 2019 +#Fri Dec 04 23:39:04 IST 2020 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.1-rc-3-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip android.useAndroidX=true android.enableD8=true diff --git a/src/main/java/org/havenapp/main/MonitorActivity.java b/src/main/java/org/havenapp/main/MonitorActivity.java index e82134f4..71a85566 100644 --- a/src/main/java/org/havenapp/main/MonitorActivity.java +++ b/src/main/java/org/havenapp/main/MonitorActivity.java @@ -17,15 +17,14 @@ package org.havenapp.main; import android.Manifest; -import android.animation.ValueAnimator; import android.app.PictureInPictureParams; +import android.app.ProgressDialog; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.PackageManager; import android.content.res.Configuration; -import android.graphics.Color; import android.os.Build; import android.os.Bundle; import android.os.CountDownTimer; @@ -40,7 +39,11 @@ import android.view.animation.AnimationUtils; import android.widget.Button; import android.widget.TextView; 
-import android.widget.Toast; + +import androidx.appcompat.app.AppCompatActivity; +import androidx.core.app.ActivityCompat; +import androidx.core.content.ContextCompat; +import androidx.localbroadcastmanager.content.LocalBroadcastManager; import com.wdullaer.materialdatetimepicker.time.TimePickerDialog; @@ -55,11 +58,6 @@ import java.io.FileOutputStream; import java.io.IOException; -import androidx.appcompat.app.AppCompatActivity; -import androidx.core.app.ActivityCompat; -import androidx.core.content.ContextCompat; -import androidx.localbroadcastmanager.content.LocalBroadcastManager; - import static org.havenapp.main.Utils.getTimerText; public class MonitorActivity extends AppCompatActivity implements TimePickerDialog.OnTimeSetListener { @@ -83,6 +81,8 @@ public class MonitorActivity extends AppCompatActivity implements TimePickerDial private Animation mAnimShake; private TextView txtStatus; + private ProgressDialog progressDialog; + private int lastEventType = -1; /** @@ -134,11 +134,14 @@ public void handleMessage(Message msg) { BroadcastReceiver receiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { - int eventType = intent.getIntExtra("type",-1); - boolean detected = intent.getBooleanExtra("detected",true); - if (detected) - handler.sendEmptyMessage(eventType); + if (eventType == MonitorService.MSG_STOP_SELF) { + notifyMonitoringEnded(); + } else { + boolean detected = intent.getBooleanExtra("detected", true); + if (detected) + handler.sendEmptyMessage(eventType); + } } }; @@ -177,8 +180,7 @@ private void initSetupLayout() { preferences = new PreferenceManager(getApplicationContext()); setContentView(R.layout.activity_monitor); - txtTimer = (TextView) findViewById(R.id.timer_text); - View viewTimer = findViewById(R.id.timer_container); + txtTimer = findViewById(R.id.timer_text); int timeM = preferences.getTimerDelay() * 1000; @@ -233,8 +235,6 @@ private void initSetupLayout() { } private void configCamera() { - - 
mFragmentCamera.stopCamera(); startActivityForResult(new Intent(this, CameraConfigureActivity.class),REQUEST_CAMERA); } @@ -259,8 +259,8 @@ private void doCancel() { if (mIsMonitoring) { mIsMonitoring = false; - stopService(new Intent(this, MonitorService.class)); - finish(); + showAlertDialog(); + mFragmentCamera.stopMonitoring(); } else { findViewById(R.id.btnStartNow).setVisibility(View.VISIBLE); @@ -313,17 +313,8 @@ protected void onActivityResult(int requestCode, int resultCode, Intent data) { } else if (requestCode == REQUEST_CAMERA) { - mFragmentCamera.initCamera(); - } - } - - @Override - protected void onDestroy() { - if (!mIsMonitoring) - { - mFragmentCamera.stopCamera(); +// mFragmentCamera.initCamera(); } - super.onDestroy(); } private void initTimer() { @@ -475,4 +466,17 @@ public void onTimeSet(TimePickerDialog view, int hourOfDay, int minute, int seco updateTimerValue(delaySeconds); } + private void showAlertDialog() { + progressDialog = new ProgressDialog(this); + progressDialog.setMessage(getString(R.string.finishing_up)); + progressDialog.setCancelable(false); + progressDialog.show(); + } + + private void notifyMonitoringEnded() { + if (progressDialog != null && progressDialog.isShowing()) { + progressDialog.dismiss(); + } + finish(); + } } diff --git a/src/main/java/org/havenapp/main/PreferenceManager.java b/src/main/java/org/havenapp/main/PreferenceManager.java index bbb4151e..2540b4f6 100644 --- a/src/main/java/org/havenapp/main/PreferenceManager.java +++ b/src/main/java/org/havenapp/main/PreferenceManager.java @@ -29,6 +29,7 @@ import androidx.appcompat.app.AppCompatActivity; import org.havenapp.main.sensors.motion.LuminanceMotionDetector; +import org.havenapp.main.storage.SharedPreferenceStringLiveData; import java.io.File; import java.util.Date; @@ -70,6 +71,7 @@ public class PreferenceManager { public static final String VERIFY_SIGNAL = "verify_signal"; public static final String VOICE_VERIFY_SIGNAL = "voice_verify_signal"; public static 
final String RESET_SIGNAL_CONFIG = "reset_signal_config"; + public static final String SIMULTANEOUS_IMAGE_MONITORING = "simultaneous_image_monitoring"; private static final String UNLOCK_CODE="unlock_code"; private static final String ACCESS_TOKEN="access_token"; @@ -237,6 +239,15 @@ public boolean getVideoMonitoringActive() { return appSharedPrefs.getBoolean(context.getResources().getString(R.string.video_active_preference_key), false); } + public void setSimultaneousImageMonitoring(boolean active) { + prefsEditor.putBoolean(SIMULTANEOUS_IMAGE_MONITORING, active); + prefsEditor.commit(); + } + + public boolean isSimultaneousImageMonitoring() { + return appSharedPrefs.getBoolean(SIMULTANEOUS_IMAGE_MONITORING, false); + } + public void activateCamera(boolean active) { prefsEditor.putBoolean(CAMERA_ACTIVE, active); prefsEditor.commit(); @@ -254,6 +265,11 @@ public void setCamera(String camera) { public String getCamera() { return appSharedPrefs.getString(CAMERA, FRONT); } + + @NonNull + public SharedPreferenceStringLiveData getCameraLiveData() { + return new SharedPreferenceStringLiveData(appSharedPrefs, CAMERA, FRONT); + } public void setCameraSensitivity(int sensitivity) { prefsEditor.putInt(CAMERA_SENSITIVITY, sensitivity); diff --git a/src/main/java/org/havenapp/main/SettingsFragment.java b/src/main/java/org/havenapp/main/SettingsFragment.java index 81a0339c..f3bfe8c3 100644 --- a/src/main/java/org/havenapp/main/SettingsFragment.java +++ b/src/main/java/org/havenapp/main/SettingsFragment.java @@ -9,7 +9,6 @@ import android.app.Activity; import android.app.ProgressDialog; import android.content.Context; -import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; @@ -25,6 +24,17 @@ import android.widget.Switch; import android.widget.Toast; +import androidx.annotation.NonNull; +import androidx.appcompat.app.AlertDialog; +import androidx.appcompat.app.AppCompatActivity; 
+import androidx.core.app.ActivityCompat; +import androidx.core.content.ContextCompat; +import androidx.preference.EditTextPreference; +import androidx.preference.ListPreference; +import androidx.preference.Preference; +import androidx.preference.PreferenceFragmentCompat; +import androidx.preference.SwitchPreference; + import com.google.i18n.phonenumbers.PhoneNumberUtil; import com.wdullaer.materialdatetimepicker.time.TimePickerDialog; @@ -38,16 +48,6 @@ import java.io.File; import java.util.Locale; -import androidx.annotation.NonNull; -import androidx.appcompat.app.AlertDialog; -import androidx.appcompat.app.AppCompatActivity; -import androidx.core.app.ActivityCompat; -import androidx.core.content.ContextCompat; -import androidx.preference.EditTextPreference; -import androidx.preference.ListPreference; -import androidx.preference.Preference; -import androidx.preference.PreferenceFragmentCompat; -import androidx.preference.SwitchPreference; import info.guardianproject.netcipher.proxy.OrbotHelper; @@ -309,6 +309,11 @@ protected void save() { preferences.setActivateVideoMonitoring(videoMonitoringActive); + boolean simultaneousImageMonitoring = ((SwitchPreference) findPreference( + PreferenceManager.SIMULTANEOUS_IMAGE_MONITORING)).isChecked(); + + preferences.setSimultaneousImageMonitoring(simultaneousImageMonitoring); + boolean remoteNotificationActive = ((SwitchPreference) findPreference(PreferenceManager.REMOTE_NOTIFICATION_ACTIVE)).isChecked(); preferences.setRemoteNotificationActive(remoteNotificationActive); diff --git a/src/main/java/org/havenapp/main/sensors/MicrophoneMonitor.java b/src/main/java/org/havenapp/main/sensors/MicrophoneMonitor.java index 925bf823..2ea3e0db 100644 --- a/src/main/java/org/havenapp/main/sensors/MicrophoneMonitor.java +++ b/src/main/java/org/havenapp/main/sensors/MicrophoneMonitor.java @@ -102,7 +102,7 @@ public void stop (Context context) { context.unbindService(mConnection); if (microphone != null) - microphone.cancel(true); + 
microphone.cancelTask(); } diff --git a/src/main/java/org/havenapp/main/sensors/media/MicSamplerTask.java b/src/main/java/org/havenapp/main/sensors/media/MicSamplerTask.java index 934a4d86..4ea54b4a 100644 --- a/src/main/java/org/havenapp/main/sensors/media/MicSamplerTask.java +++ b/src/main/java/org/havenapp/main/sensors/media/MicSamplerTask.java @@ -13,10 +13,14 @@ import android.os.AsyncTask; import android.util.Log; +import androidx.annotation.Nullable; + import java.io.IOException; + public class MicSamplerTask extends AsyncTask { + @Nullable private MicListener listener = null; private AudioCodec volumeMeter = new AudioCodec(); private boolean sampling = true; @@ -30,10 +34,6 @@ public interface MicListener { public void setMicListener(MicListener listener) { this.listener = listener; } - - protected Void onPreExecute(Void...params) { - return null; - } @Override protected Void doInBackground(Void... params) { @@ -106,6 +106,13 @@ public void pause() { @Override protected void onProgressUpdate(Object... 
progress) { short[] data = (short[]) progress[0]; - listener.onSignalReceived(data); - } + if (listener != null) { + listener.onSignalReceived(data); + } + } + + public void cancelTask() { + cancel(true); + listener = null; + } } diff --git a/src/main/java/org/havenapp/main/sensors/motion/MotionDetector.java b/src/main/java/org/havenapp/main/sensors/motion/MotionDetector.java index dbab9eb7..6bbee926 100644 --- a/src/main/java/org/havenapp/main/sensors/motion/MotionDetector.java +++ b/src/main/java/org/havenapp/main/sensors/motion/MotionDetector.java @@ -9,12 +9,13 @@ import android.graphics.Bitmap; import android.graphics.BitmapFactory; -import android.graphics.Color; import android.graphics.ImageFormat; -import android.graphics.Matrix; import android.graphics.Rect; import android.graphics.YuvImage; -import android.os.Handler; + +import androidx.annotation.NonNull; +import androidx.lifecycle.LiveData; +import androidx.lifecycle.MutableLiveData; import org.havenapp.main.sensors.media.ImageCodec; @@ -31,6 +32,9 @@ * */ public class MotionDetector { + + private MutableLiveData> resultEventLiveData = new MutableLiveData<>(); + private MutableLiveData resultLiveData = new MutableLiveData<>(); // Input data @@ -50,14 +54,11 @@ void onProcess( public void addListener(MotionListener listener) { listeners.add(listener); } - - public MotionDetector( - int motionSensitivity) { + + public MotionDetector(int motionSensitivity) { this.motionSensitivity = motionSensitivity; detector = new LuminanceMotionDetector(); detector.setThreshold(motionSensitivity); - - } public void setMotionSensitivity (int motionSensitivity) @@ -118,6 +119,9 @@ public void detect(byte[] rawOldPic, rawBitmap, true); } + MotionDetectorResult result = new MotionDetectorResult(percChanged, true, rawBitmap); + resultLiveData.postValue(result); + resultEventLiveData.postValue(new Event<>(result)); } else { @@ -127,12 +131,11 @@ public void detect(byte[] rawOldPic, null, false); } - + MotionDetectorResult 
result = new MotionDetectorResult(0, false, null); + resultLiveData.postValue(result); + resultEventLiveData.postValue(new Event<>(result)); } - } - - } public static Bitmap convertImage (byte[] nv21bytearray, int width, int height) @@ -145,5 +148,28 @@ public static Bitmap convertImage (byte[] nv21bytearray, int width, int height) return bitmap; } + /** + * A {@link LiveData} to post result from {@link #detect(byte[], byte[], int, int)} in form + * of an {@link Event} + *
+ * We can use either this or {@link MotionListener} but this will be Lifecycle aware + * + * @return a {@link LiveData} to observe for motion detection result + */ + @NonNull + public LiveData<Event<MotionDetectorResult>> getResultEventLiveData() { + return resultEventLiveData; + } + /** + * A {@link LiveData} to post result from {@link #detect(byte[], byte[], int, int)} + *
+ * We can use either this or {@link MotionListener} but this will be Lifecycle aware + * + * @return a {@link LiveData} to observe for motion detection result + */ + @NonNull + public LiveData getDetectorResultLiveData() { + return resultLiveData; + } } diff --git a/src/main/java/org/havenapp/main/sensors/motion/MotionDetectorResult.kt b/src/main/java/org/havenapp/main/sensors/motion/MotionDetectorResult.kt new file mode 100644 index 00000000..2cf424b7 --- /dev/null +++ b/src/main/java/org/havenapp/main/sensors/motion/MotionDetectorResult.kt @@ -0,0 +1,23 @@ +package org.havenapp.main.sensors.motion + +import android.graphics.Bitmap + +data class MotionDetectorResult( + val pixelsChanged: Int, + val motionDetected: Boolean, + val rawBitmap: Bitmap? +) + +/** + * A data wrapper class to represent an event, ie, once consumed + * will not be available again. + */ +class Event(data: T) { + private var dataInternal: T? = data + fun consume(): T? { + dataInternal ?: return null + val copy = dataInternal + dataInternal = null + return copy + } +} diff --git a/src/main/java/org/havenapp/main/service/MonitorService.java b/src/main/java/org/havenapp/main/service/MonitorService.java index 9c064e76..b3f24b0e 100644 --- a/src/main/java/org/havenapp/main/service/MonitorService.java +++ b/src/main/java/org/havenapp/main/service/MonitorService.java @@ -16,14 +16,12 @@ import android.content.Intent; import android.content.IntentFilter; import android.graphics.Color; -import android.net.Uri; import android.os.Build; import android.os.Handler; import android.os.IBinder; import android.os.Message; import android.os.Messenger; import android.os.PowerManager; -import android.telephony.SmsManager; import android.text.TextUtils; import androidx.annotation.RequiresApi; @@ -49,9 +47,6 @@ import java.util.Date; import java.util.StringTokenizer; -import androidx.annotation.RequiresApi; -import androidx.core.app.NotificationCompat; - @SuppressLint("HandlerLeak") public class MonitorService 
extends Service { @@ -109,6 +104,14 @@ public void handleMessage(Message msg) { } public final static String KEY_PATH = "path"; + + /** + * An identifier to be used as {@link Message#what} while communicating with this + * Service. + *
+ * This denotes that the current running service must be stopped. + */ + public static final int MSG_STOP_SELF = -2; /** * Messenger interface used by clients to interact @@ -322,6 +325,12 @@ public void alert(int alertType, String value) { iEvent.putExtra("type",alertType); LocalBroadcastManager.getInstance(this).sendBroadcast(iEvent); + if (alertType == MSG_STOP_SELF) { + stopForeground(true); + stopSelf(); + return; + } + if (TextUtils.isEmpty(value)) return; diff --git a/src/main/java/org/havenapp/main/service/SignalSender.java b/src/main/java/org/havenapp/main/service/SignalSender.java index 60944391..24560b1b 100644 --- a/src/main/java/org/havenapp/main/service/SignalSender.java +++ b/src/main/java/org/havenapp/main/service/SignalSender.java @@ -40,7 +40,7 @@ public class SignalSender { private SignalSender(Context context, String username) { - mContext = context; + mContext = context.getApplicationContext(); mUsername = username; mAlertCount = 0; preferences = new PreferenceManager(mContext); diff --git a/src/main/java/org/havenapp/main/storage/SharedPreferenceLiveData.kt b/src/main/java/org/havenapp/main/storage/SharedPreferenceLiveData.kt new file mode 100644 index 00000000..c58cb609 --- /dev/null +++ b/src/main/java/org/havenapp/main/storage/SharedPreferenceLiveData.kt @@ -0,0 +1,84 @@ +package org.havenapp.main.storage + +import android.content.SharedPreferences +import androidx.lifecycle.LiveData + +// Thanks to https://gist.github.com/rharter/1df1cd72ce4e9d1801bd2d49f2a96810 + +abstract class SharedPreferenceLiveData(val sharedPrefs: SharedPreferences, + val key: String, + val defValue: T) : LiveData() { + + private val preferenceChangeListener = SharedPreferences.OnSharedPreferenceChangeListener { sharedPreferences, key -> + if (key == this.key) { + value = getValueFromPreferences(key, defValue) + } + } + + abstract fun getValueFromPreferences(key: String, defValue: T): T + + override fun onActive() { + super.onActive() + value = 
getValueFromPreferences(key, defValue) + sharedPrefs.registerOnSharedPreferenceChangeListener(preferenceChangeListener) + } + + override fun onInactive() { + sharedPrefs.unregisterOnSharedPreferenceChangeListener(preferenceChangeListener) + super.onInactive() + } +} + +class SharedPreferenceIntLiveData(sharedPrefs: SharedPreferences, key: String, defValue: Int) : + SharedPreferenceLiveData(sharedPrefs, key, defValue) { + override fun getValueFromPreferences(key: String, defValue: Int): Int = sharedPrefs.getInt(key, defValue) +} + +class SharedPreferenceStringLiveData(sharedPrefs: SharedPreferences, key: String, defValue: String) : + SharedPreferenceLiveData(sharedPrefs, key, defValue) { + override fun getValueFromPreferences(key: String, defValue: String): String = sharedPrefs.getString(key, defValue)!! +} + +class SharedPreferenceBooleanLiveData(sharedPrefs: SharedPreferences, key: String, defValue: Boolean) : + SharedPreferenceLiveData(sharedPrefs, key, defValue) { + override fun getValueFromPreferences(key: String, defValue: Boolean): Boolean = sharedPrefs.getBoolean(key, defValue) +} + +class SharedPreferenceFloatLiveData(sharedPrefs: SharedPreferences, key: String, defValue: Float) : + SharedPreferenceLiveData(sharedPrefs, key, defValue) { + override fun getValueFromPreferences(key: String, defValue: Float): Float = sharedPrefs.getFloat(key, defValue) +} + +class SharedPreferenceLongLiveData(sharedPrefs: SharedPreferences, key: String, defValue: Long) : + SharedPreferenceLiveData(sharedPrefs, key, defValue) { + override fun getValueFromPreferences(key: String, defValue: Long): Long = sharedPrefs.getLong(key, defValue) +} + +class SharedPreferenceStringSetLiveData(sharedPrefs: SharedPreferences, key: String, defValue: Set) : + SharedPreferenceLiveData>(sharedPrefs, key, defValue) { + override fun getValueFromPreferences(key: String, defValue: Set): Set = sharedPrefs.getStringSet(key, defValue)!! 
+} + +fun SharedPreferences.intLiveData(key: String, defValue: Int): SharedPreferenceLiveData { + return SharedPreferenceIntLiveData(this, key, defValue) +} + +fun SharedPreferences.stringLiveData(key: String, defValue: String): SharedPreferenceLiveData { + return SharedPreferenceStringLiveData(this, key, defValue) +} + +fun SharedPreferences.booleanLiveData(key: String, defValue: Boolean): SharedPreferenceLiveData { + return SharedPreferenceBooleanLiveData(this, key, defValue) +} + +fun SharedPreferences.floatLiveData(key: String, defValue: Float): SharedPreferenceLiveData { + return SharedPreferenceFloatLiveData(this, key, defValue) +} + +fun SharedPreferences.longLiveData(key: String, defValue: Long): SharedPreferenceLiveData { + return SharedPreferenceLongLiveData(this, key, defValue) +} + +fun SharedPreferences.stringSetLiveData(key: String, defValue: Set): SharedPreferenceLiveData> { + return SharedPreferenceStringSetLiveData(this, key, defValue) +} diff --git a/src/main/java/org/havenapp/main/ui/CameraConfigureActivity.java b/src/main/java/org/havenapp/main/ui/CameraConfigureActivity.java index 1a2c2c78..8ca11398 100644 --- a/src/main/java/org/havenapp/main/ui/CameraConfigureActivity.java +++ b/src/main/java/org/havenapp/main/ui/CameraConfigureActivity.java @@ -17,26 +17,20 @@ package org.havenapp.main.ui; import android.Manifest; -import android.content.BroadcastReceiver; -import android.content.Context; -import android.content.Intent; -import android.content.IntentFilter; import android.content.pm.PackageManager; import android.os.Bundle; import android.view.MenuItem; -import android.view.View; import android.widget.TextView; -import android.widget.Toast; - -import org.havenapp.main.PreferenceManager; -import org.havenapp.main.R; +import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import androidx.appcompat.widget.Toolbar; import androidx.core.app.ActivityCompat; import androidx.core.content.ContextCompat; -import 
androidx.localbroadcastmanager.content.LocalBroadcastManager; -import me.angrybyte.numberpicker.listener.OnValueChangeListener; + +import org.havenapp.main.PreferenceManager; +import org.havenapp.main.R; + import me.angrybyte.numberpicker.view.ActualNumberPicker; @@ -45,11 +39,7 @@ public class CameraConfigureActivity extends AppCompatActivity { private PreferenceManager mPrefManager = null; - private boolean mIsMonitoring = false; - private boolean mIsInitializedLayout = false; - private CameraFragment mFragment; - private ActualNumberPicker mNumberTrigger; private TextView mTxtStatus; @Override @@ -72,17 +62,19 @@ private void initLayout() { setTitle(""); getSupportActionBar().setDisplayHomeAsUpEnabled(true); - mFragment = (CameraFragment) getSupportFragmentManager().findFragmentById(R.id.fragment_camera); mTxtStatus = findViewById(R.id.status); + mFragment = (CameraFragment) getSupportFragmentManager().findFragmentById(R.id.fragment_camera); + if (mFragment != null) { + mFragment.analyseFrames(true); + mFragment.motionDetectorLiveData().observe(this, result -> { + mTxtStatus.setText(getString(R.string.percentage_motion_detected, + String.valueOf(result.getPixelsChanged()))); + }); + } - findViewById(R.id.btnCameraSwitch).setOnClickListener(new View.OnClickListener() { - @Override - public void onClick(View v) { - switchCamera(); - } - }); + findViewById(R.id.btnCameraSwitch).setOnClickListener(v -> switchCamera()); - mNumberTrigger = findViewById(R.id.number_trigger_level); + ActualNumberPicker mNumberTrigger = findViewById(R.id.number_trigger_level); mNumberTrigger.setValue(mPrefManager.getCameraSensitivity()); mNumberTrigger.setListener((oldValue, newValue) -> { @@ -90,28 +82,22 @@ public void onClick(View v) { mPrefManager.setCameraSensitivity(newValue); setResult(RESULT_OK); }); - mIsInitializedLayout = true; } private void switchCamera() { - String camera = mPrefManager.getCamera(); if (camera.equals(PreferenceManager.FRONT)) 
mPrefManager.setCamera(PreferenceManager.BACK); else if (camera.equals(PreferenceManager.BACK)) mPrefManager.setCamera(PreferenceManager.FRONT); - mFragment.updateCamera(); setResult(RESULT_OK); } - - @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: - mFragment.stopCamera(); finish(); break; } @@ -124,21 +110,11 @@ public boolean onOptionsItemSelected(MenuItem item) { */ @Override public void onBackPressed() { - mFragment.stopCamera(); finish(); } @Override - public void onResume() { - super.onResume(); - IntentFilter filter = new IntentFilter(); - filter.addAction("event"); - LocalBroadcastManager.getInstance(this).registerReceiver(receiver,filter ); - - } - - @Override - public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { + public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { super.onRequestPermissionsResult(requestCode, permissions, grantResults); switch (requestCode) { @@ -172,28 +148,4 @@ private boolean askForPermission(String permission, Integer requestCode) { return false; } } - - - @Override - protected void onPause() { - super.onPause(); - LocalBroadcastManager.getInstance(this).unregisterReceiver(receiver); - - } - - BroadcastReceiver receiver = new BroadcastReceiver() { - @Override - public void onReceive(Context context, Intent intent) { - - int eventType = intent.getIntExtra("type",-1); - boolean detected = intent.getBooleanExtra("detected",true); - int percChanged = intent.getIntExtra("changed",-1); - - if (percChanged != -1) - { - mTxtStatus.setText(percChanged + "% motion detected"); - } - } - }; - } diff --git a/src/main/java/org/havenapp/main/ui/CameraFragment.java b/src/main/java/org/havenapp/main/ui/CameraFragment.java deleted file mode 100644 index 39bed747..00000000 --- a/src/main/java/org/havenapp/main/ui/CameraFragment.java +++ /dev/null @@ -1,133 +0,0 @@ - -/* - 
* Copyright (c) 2017 Nathanial Freitas / Guardian Project - * * Licensed under the GPLv3 license. - * - * Copyright (c) 2013-2015 Marco Ziccardi, Luca Bonato - * Licensed under the MIT license. - */ -package org.havenapp.main.ui; - -import android.content.Intent; -import android.hardware.SensorEvent; -import android.os.Bundle; -import android.view.LayoutInflater; -import android.view.View; -import android.view.ViewGroup; -import android.widget.ImageView; - -import androidx.fragment.app.Fragment; -import androidx.localbroadcastmanager.content.LocalBroadcastManager; - -import com.otaliastudios.cameraview.CameraView; -import com.otaliastudios.cameraview.controls.Audio; - -import org.havenapp.main.PreferenceManager; -import org.havenapp.main.R; -import org.havenapp.main.model.EventTrigger; - -public final class CameraFragment extends Fragment { - - private CameraViewHolder cameraViewHolder; - private ImageView newImage; - private PreferenceManager prefs; - - @Override - public View onCreateView(LayoutInflater inflater, ViewGroup container, - Bundle savedInstanceState) { - - View view = inflater.inflate(R.layout.camera_fragment, container, false); - newImage = view.findViewById(R.id.new_image); - - return view; - - } - - public void setMotionSensitivity (int threshold) - { - cameraViewHolder.setMotionSensitivity(threshold); - } - - @Override - public void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - prefs = new PreferenceManager(getContext()); - } - - @Override - public void onPause() { - super.onPause(); - } - - @Override - public void onResume() { - super.onResume(); - initCamera(); - - cameraViewHolder.setMotionSensitivity(prefs.getCameraSensitivity()); - - - } - - public void updateCamera () - { - if (cameraViewHolder != null) { - cameraViewHolder.updateCamera(); - } - } - - public void stopCamera () - { - if (cameraViewHolder != null) { - cameraViewHolder.stopCamera(); - } - } - - public void initCamera () - { - - - 
PreferenceManager prefs = new PreferenceManager(getActivity()); - - if (prefs.getCameraActivation()) { - //Uncomment to see the camera - - CameraView cameraView = getActivity().findViewById(R.id.camera_view); - cameraView.setAudio(Audio.OFF); - - if (cameraViewHolder == null) { - cameraViewHolder = new CameraViewHolder(getActivity(), cameraView); - - cameraViewHolder.addListener((percChanged, rawBitmap, motionDetected) -> { - - if (!isDetached()) { - Intent iEvent = new Intent("event"); - iEvent.putExtra("type", EventTrigger.CAMERA); - iEvent.putExtra("detected",motionDetected); - iEvent.putExtra("changed",percChanged); - - LocalBroadcastManager.getInstance(getActivity()).sendBroadcast(iEvent); - } - - }); - } - - } - - cameraViewHolder.startCamera(); - - } - - @Override - public void onDestroy() { - super.onDestroy(); - - if (cameraViewHolder != null) - cameraViewHolder.destroy(); - - } - - public void onSensorChanged(SensorEvent event) { - - } -} \ No newline at end of file diff --git a/src/main/java/org/havenapp/main/ui/CameraFragment.kt b/src/main/java/org/havenapp/main/ui/CameraFragment.kt new file mode 100644 index 00000000..826b1621 --- /dev/null +++ b/src/main/java/org/havenapp/main/ui/CameraFragment.kt @@ -0,0 +1,343 @@ +/* + * Copyright (c) 2017 Nathanial Freitas / Guardian Project + * * Licensed under the GPLv3 license. + * + * Copyright (c) 2013-2015 Marco Ziccardi, Luca Bonato + * Licensed under the MIT license. 
+ */ +package org.havenapp.main.ui + +import android.content.ComponentName +import android.content.Context +import android.content.Intent +import android.content.ServiceConnection +import android.graphics.ImageFormat +import android.hardware.camera2.CameraManager +import android.net.Uri +import android.os.Bundle +import android.os.IBinder +import android.os.Message +import android.os.Messenger +import android.util.Log +import android.util.Size +import android.view.LayoutInflater +import android.view.View +import android.view.ViewGroup +import androidx.annotation.UiThread +import androidx.annotation.WorkerThread +import androidx.camera.core.* +import androidx.camera.core.impl.OptionsBundle +import androidx.camera.core.impl.VideoCaptureConfig +import androidx.camera.lifecycle.ProcessCameraProvider +import androidx.camera.view.PreviewView +import androidx.core.content.ContextCompat +import androidx.fragment.app.Fragment +import androidx.lifecycle.Observer +import androidx.localbroadcastmanager.content.LocalBroadcastManager +import kotlinx.coroutines.* +import org.havenapp.main.PreferenceManager +import org.havenapp.main.R +import org.havenapp.main.Utils +import org.havenapp.main.model.EventTrigger +import org.havenapp.main.sensors.motion.LuminanceMotionDetector +import org.havenapp.main.sensors.motion.MotionDetector +import org.havenapp.main.service.MonitorService +import org.havenapp.main.usecase.MotionAnalyser +import org.havenapp.main.util.* +import java.io.File +import java.lang.Runnable +import java.text.SimpleDateFormat +import java.util.* +import java.util.concurrent.Executors + +class CameraFragment : Fragment() { + private var prefs: PreferenceManager? = null + + private var preview: Preview? = null + private var imageCapture: ImageCapture? = null + private var imageAnalyzer: ImageAnalysis? = null + private var videoCapture: VideoCapture? = null + private var camera: Camera? 
= null + + private var recordingEvent = false + + private val job = SupervisorJob() + private val uiScope = CoroutineScope(Dispatchers.Main + job) + + /** + * Sensitivity of motion detection + */ + private var motionSensitivity = LuminanceMotionDetector.MOTION_MEDIUM + + private val motionDetector = MotionDetector(motionSensitivity) + + private val analysisFrameSize = Size(640, 480) + private val motionAnalyser = MotionAnalyser( + ImageFormat.YUV_420_888, + analysisFrameSize, + motionDetector + ) + + /** + * Messenger used to signal motion to the alert service + */ + @Volatile + private var serviceMessenger: Messenger? = null + + private val connection: ServiceConnection = object : ServiceConnection { + override fun onServiceConnected(className: ComponentName, service: IBinder) { + Log.i("CameraFragment", "SERVICE CONNECTED") + // We've bound to LocalService, cast the IBinder and get LocalService instance + serviceMessenger = Messenger(service) + motionAnalyser.setAnalyze(true) + } + + override fun onServiceDisconnected(arg0: ComponentName) { + Log.i("CameraFragment", "SERVICE DISCONNECTED") + motionAnalyser.setAnalyze(false) + serviceMessenger = null + } + } + + private val cameraExecutor = Executors.newSingleThreadExecutor() + private val cameraDispatcher = cameraExecutor.asCoroutineDispatcher() + + override fun onCreateView( + inflater: LayoutInflater, + container: ViewGroup?, + savedInstanceState: Bundle? + ): View? { + return inflater.inflate(R.layout.camera_fragment, container, false) + } + + override fun onViewCreated(view: View, savedInstanceState: Bundle?) 
{ + super.onViewCreated(view, savedInstanceState) + prefs = PreferenceManager(requireContext()) + // We bind to the alert service + requireContext().bindService(Intent(context, MonitorService::class.java), + connection, Context.BIND_ABOVE_CLIENT) + motionDetector.resultEventLiveData.observe(viewLifecycleOwner, Observer { event -> + event?.consume()?.let { + val iEvent = Intent("event").apply { + putExtra("type", EventTrigger.CAMERA) + putExtra("detected", it.motionDetected) + putExtra("changed", it.pixelsChanged) + } + LocalBroadcastManager.getInstance(requireActivity()).sendBroadcast(iEvent) + if (it.motionDetected) { + captureCameraEvent() + } + } + }) + prefs?.cameraLiveData?.observe(viewLifecycleOwner, Observer { + initCamera(it) + }) + } + + override fun onDestroyView() { + requireContext().unbindService(connection) + super.onDestroyView() + } + + override fun onDestroy() { + cameraExecutor.shutdown() + job.cancel() + super.onDestroy() + } + + fun setMotionSensitivity(threshold: Int) { + this.motionSensitivity = threshold + motionDetector.setMotionSensitivity(motionSensitivity) + } + + fun motionDetectorLiveData() = motionDetector.detectorResultLiveData + + fun analyseFrames(analyse: Boolean) = motionAnalyser.setAnalyze(analyse) + + @UiThread + private fun initCamera(cameraPref: String) { + val viewFinder = requireView().findViewById(R.id.pv_preview) + val cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext()) + + cameraProviderFuture.addListener(Runnable { + // Used to bind the lifecycle of cameras to the lifecycle owner + val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get() + + val videoMonitoring = prefs?.videoMonitoringActive ?: false + val simultaneousImageMonitoring = prefs?.isSimultaneousImageMonitoring ?: false + + // Preview + preview = Preview.Builder().build() + // image capture + imageCapture = ImageCapture.Builder() + .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY) + .build() + // analysis + 
imageAnalyzer = ImageAnalysis.Builder() + .setDefaultResolution(analysisFrameSize) // todo + .setMaxResolution(analysisFrameSize) // todo + .build() + .also { + it.setAnalyzer(cameraExecutor, motionAnalyser) + } + // video capture + videoCapture = VideoCapture.Builder() + .setTargetResolution(analysisFrameSize) + .build() + + val outputList = mutableListOf() + + (requireContext().getSystemService(Context.CAMERA_SERVICE) as CameraManager).let { manager -> + manager.cameraIdList.forEach { cameraId -> + val characteristic = manager.getCameraCharacteristics(cameraId) + Log.d("CameraCharacteristics", "$cameraId: ${characteristic.isFullLevel()}, ${characteristic.isLimitedLevelDevice()}, ${characteristic.isLegacyDevice()}") + Log.d("CameraCharacteristics", "${characteristic.checkGuarantee()}") + } + } + + // Select camera + val lensFacing = when (cameraPref) { + PreferenceManager.FRONT -> CameraSelector.LENS_FACING_FRONT + PreferenceManager.BACK -> CameraSelector.LENS_FACING_BACK + else -> CameraSelector.LENS_FACING_BACK // default + } + val cameraSelector = CameraSelector.Builder() + .requireLensFacing(lensFacing) + .build() + + + try { + // Unbind use cases before rebinding + cameraProvider.unbindAll() + + // Bind use cases to camera + camera = cameraProvider.bindToLifecycle( + viewLifecycleOwner, + cameraSelector, + if (simultaneousImageMonitoring) imageCapture else preview, + if (videoMonitoring) videoCapture else imageCapture, + imageAnalyzer + ) + preview?.setSurfaceProvider(viewFinder.surfaceProvider) + } catch (exc: Exception) { + Log.e(TAG, "Use case binding failed", exc) + } + }, ContextCompat.getMainExecutor(requireContext())) + } + + @UiThread + private fun captureCameraEvent() { + if (prefs?.videoMonitoringActive == true) { + recordVideo() + if (prefs?.isSimultaneousImageMonitoring == true) { + takePhoto() + } + } else { + takePhoto() + } + } + + @UiThread + private fun takePhoto() { + // if we are not connected to the service; we are not monitoring + if 
(serviceMessenger == null) { + return + } + // Get a stable reference of the modifiable image capture use case + val imageCapture = imageCapture ?: return + + // Create timestamped output file to hold the image + val fileImageDir = File(requireContext().getExternalFilesDir(null), prefs!!.defaultMediaStoragePath) + fileImageDir.mkdirs() + val ts = SimpleDateFormat(Utils.DATE_TIME_PATTERN, Locale.getDefault()).format(Date()) + val photoFile = File(fileImageDir, "$ts.detected.original.jpg") + + // Create output options object which contains file + metadata + val outputOptions = ImageCapture.OutputFileOptions.Builder(photoFile).build() + + // Setup image capture listener which is triggered after photo has been taken + imageCapture.takePicture(outputOptions, ContextCompat.getMainExecutor(requireContext()), + object : ImageCapture.OnImageSavedCallback { + override fun onError(exc: ImageCaptureException) { + Log.e(TAG, "Photo capture failed: ${exc.message}", exc) + } + + override fun onImageSaved(output: ImageCapture.OutputFileResults) { + val savedUri = Uri.fromFile(photoFile) + val msg = "Photo capture succeeded: ${savedUri.path}" + Log.d(TAG, msg) + val message = Message().apply { + what = EventTrigger.CAMERA + data.putString(MonitorService.KEY_PATH, savedUri.path) + } + serviceMessenger?.send(message) ?: kotlin.run { + Log.e(TAG, "Failed to send ${savedUri.path} to service") + } + } + }) + } + + @UiThread + private fun recordVideo() { + // don't record if monitoring is not set or already recording event or service is not running + if (prefs?.videoMonitoringActive != true || recordingEvent || serviceMessenger == null) { + return + } + Log.d(TAG, "Start record video") + // get the video monitoring length from prefs in ms + val videoMonitoringLength = prefs?.monitoringTime?.let { it * 1_000L } ?: return + uiScope.launch { + videoCapture?.let { + recordingEvent = true + // Create timestamped output file to hold the image + val fileImageDir = 
File(requireContext().getExternalFilesDir(null), prefs!!.defaultMediaStoragePath) + fileImageDir.mkdirs() + val ts = SimpleDateFormat(Utils.DATE_TIME_PATTERN, Locale.getDefault()).format(Date()) + val videoFile = File(fileImageDir, "$ts.detected.original.mp4") + val option = VideoCapture.OutputFileOptions.Builder(videoFile).build() + it.startRecording(option, cameraExecutor, object : VideoCapture.OnVideoSavedCallback { + @WorkerThread + override fun onVideoSaved(outputFileResults: VideoCapture.OutputFileResults) { + Log.e(TAG, "Saved video with to ${outputFileResults.savedUri}") + val message = Message().apply { + what = EventTrigger.CAMERA_VIDEO + data.putString(MonitorService.KEY_PATH, videoFile.absolutePath) + } + serviceMessenger?.send(message) ?: kotlin.run { + Log.e(TAG, "Failed to send ${videoFile.absolutePath} to service") + } + } + + @WorkerThread + override fun onError(videoCaptureError: Int, message: String, cause: Throwable?) { + Log.e(TAG, "Failed to save video with error $videoCaptureError, $message, $cause") + } + }) + delay(videoMonitoringLength) + it.stopRecording() + recordingEvent = false + } + } + } + + @UiThread + fun stopMonitoring() { + motionAnalyser.setAnalyze(false) + videoCapture?.stopRecording() + uiScope.launch { + delay(3_000L) + withContext(cameraDispatcher) { + val message = Message().apply { + what = MonitorService.MSG_STOP_SELF + } + serviceMessenger?.send(message) ?: kotlin.run { + Log.e(TAG, "Failed to send $message to service") + } + } + } + } + + companion object { + private val TAG = CameraFragment::class.java.simpleName + } +} diff --git a/src/main/java/org/havenapp/main/ui/CameraViewHolder.java b/src/main/java/org/havenapp/main/ui/CameraViewHolder.java deleted file mode 100644 index 24210750..00000000 --- a/src/main/java/org/havenapp/main/ui/CameraViewHolder.java +++ /dev/null @@ -1,450 +0,0 @@ - -/* - * Copyright (c) 2017 Nathanial Freitas / Guardian Project - * * Licensed under the GPLv3 license. 
- * - * Copyright (c) 2013-2015 Marco Ziccardi, Luca Bonato - * Licensed under the MIT license. - */ - -package org.havenapp.main.ui; - -import android.app.Activity; -import android.content.ComponentName; -import android.content.Context; -import android.content.Intent; -import android.content.ServiceConnection; -import android.graphics.Bitmap; -import android.graphics.Matrix; -import android.os.Environment; -import android.os.Handler; -import android.os.IBinder; -import android.os.Message; -import android.os.Messenger; -import android.os.RemoteException; -import android.util.Log; -import android.view.Surface; - -import androidx.annotation.NonNull; - -import com.otaliastudios.cameraview.CameraView; -import com.otaliastudios.cameraview.controls.Facing; -import com.otaliastudios.cameraview.frame.Frame; -import com.otaliastudios.cameraview.frame.FrameProcessor; -import com.otaliastudios.cameraview.size.Size; -import com.otaliastudios.cameraview.size.SizeSelector; - -import org.havenapp.main.PreferenceManager; -import org.havenapp.main.Utils; -import org.havenapp.main.model.EventTrigger; -import org.havenapp.main.sensors.motion.LuminanceMotionDetector; -import org.havenapp.main.sensors.motion.MotionDetector; -import org.havenapp.main.service.MonitorService; -import org.jcodec.api.android.AndroidSequenceEncoder; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Locale; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; - -public class CameraViewHolder { - - /** - * Object to retrieve and set shared preferences - */ - private PreferenceManager prefs; - - private final static int DETECTION_INTERVAL_MS = 200; - private final static int MAX_CAMERA_WIDTH = 800; - - private List 
listeners = new ArrayList<>(); - - /** - * Timestamp of the last picture processed - */ - private long lastTimestamp; - /** - * Last picture processed - */ - private byte[] lastPic; - /** - * True IFF there's an async task processing images - */ - private boolean doingVideoProcessing = false; - - /** - * Handler used to update back the UI after motion detection - */ - private final Handler updateHandler = new Handler(); - - /** - * Sensitivity of motion detection - */ - private int motionSensitivity = LuminanceMotionDetector.MOTION_MEDIUM; - - /** - * holder of the CameraView and state of running - */ - private CameraView cameraView = null; - private boolean isCameraStarted = false; - - /** - * Messenger used to signal motion to the alert service - */ - private Messenger serviceMessenger = null; - //private Camera camera; - private Activity context; - private MotionDetector motionDetector; - - AndroidSequenceEncoder encoder; - private File videoFile; - - //for managing bitmap processing - //private RenderScript renderScript; - - private ServiceConnection mConnection = new ServiceConnection() { - - public void onServiceConnected(ComponentName className, - IBinder service) { - Log.i("CameraFragment", "SERVICE CONNECTED"); - // We've bound to LocalService, cast the IBinder and get LocalService instance - serviceMessenger = new Messenger(service); - } - - public void onServiceDisconnected(ComponentName arg0) { - Log.i("CameraFragment", "SERVICE DISCONNECTED"); - serviceMessenger = null; - } - }; - - public CameraViewHolder(Activity context, CameraView cameraView) { - //super(context); - this.context = context; - this.cameraView = cameraView; - //this.renderScript = RenderScript.create(context); // where context can be your activity, application, etc. 
- - prefs = new PreferenceManager(context); - - motionDetector = new MotionDetector( - motionSensitivity); - - motionDetector.addListener((detectedImage, rawBitmap, motionDetected) -> { - - for (MotionDetector.MotionListener listener : listeners) - listener.onProcess(detectedImage,rawBitmap,motionDetected); - - if (motionDetected) - mEncodeVideoThreadPool.execute(() -> saveDetectedImage(rawBitmap)); - - }); - /* - * We bind to the alert service - */ - this.context.bindService(new Intent(context, - MonitorService.class), mConnection, Context.BIND_ABOVE_CLIENT); - } - - private void saveDetectedImage (Bitmap rawBitmap) - { - if (serviceMessenger != null) { - Message message = new Message(); - message.what = EventTrigger.CAMERA; - - try { - - File fileImageDir = new File(this.context.getExternalFilesDir(null), prefs.getDefaultMediaStoragePath()); - fileImageDir.mkdirs(); - - String ts = new SimpleDateFormat(Utils.DATE_TIME_PATTERN, - Locale.getDefault()).format(new Date()); - - File fileImage = new File(fileImageDir, ts.concat(".detected.original.jpg")); - FileOutputStream stream = new FileOutputStream(fileImage); - rawBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream); - - stream.flush(); - stream.close(); - message.getData().putString(MonitorService.KEY_PATH, fileImage.getAbsolutePath()); - - //store the still match frame, even if doing video - serviceMessenger.send(message); - - if (prefs.getVideoMonitoringActive() && (!doingVideoProcessing)) { - recordVideo(); - - } - - } catch (Exception e) { - // Cannot happen - Log.e("CameraViewHolder", "error creating image", e); - } - } - } - - public void setMotionSensitivity (int - motionSensitivity ) - { - this. 
- motionSensitivity = motionSensitivity; - motionDetector.setMotionSensitivity(motionSensitivity); - } - - public void addListener(MotionDetector.MotionListener listener) { - listeners.add(listener); - } - - - /** - * Called on the creation of the surface: - * setting camera parameters to lower possible resolution - * (preferred is 640x480) - * in order to minimize CPU usage - */ - public void startCamera() { - - - updateCamera(); - - cameraView.setPlaySounds(false); - cameraView.setPreviewStreamSize(new SizeSelector() { - @NonNull - @Override - public List select(@NonNull List source) { - ArrayList result = new ArrayList<>(); - - for (Size size : source) - { - if (size.getWidth() processNewFrame(data, size)); - } else { - mEncodeVideoThreadPool.execute(() -> recordNewFrame(data, size)); - } - } - }); - - - } - - public void updateCamera () - { - switch (prefs.getCamera()) { - case PreferenceManager.FRONT: - cameraView.setFacing(Facing.FRONT); - break; - case PreferenceManager.BACK: - cameraView.setFacing(Facing.BACK); - break; - default: - // camera = null; - break; - } - } - - // A queue of Runnables - private final BlockingQueue mDecodeWorkQueue = new LinkedBlockingQueue(); - - // Creates a thread pool manager - private ThreadPoolExecutor mDecodeThreadPool = new ThreadPoolExecutor( - 1, // Initial pool size - 1, // Max pool size - 10, - TimeUnit.SECONDS, - mDecodeWorkQueue); - - // A queue of Runnables - private final BlockingQueue mEncodeVideoWorkQueue = new LinkedBlockingQueue(); - - // Creates a thread pool manager - private ThreadPoolExecutor mEncodeVideoThreadPool = new ThreadPoolExecutor( - 1, // Initial pool size - 1, // Max pool size - 10, - TimeUnit.SECONDS, - mEncodeVideoWorkQueue); - - - private Matrix mtxVideoRotate; - - private void recordNewFrame (byte[] data, Size size) - { - if (data != null && size != null) { - int width = size.getWidth(); - int height = size.getHeight(); - Bitmap bitmap = Bitmap.createBitmap(MotionDetector.convertImage(data, 
width, height), 0, 0, width, height, mtxVideoRotate, true); - - try { - if (encoder != null) - encoder.encodeImage(bitmap); - - bitmap.recycle(); - - } catch (Exception e) { - e.printStackTrace(); - } - } - - } - - private void finishVideoEncoding () - { - try { - encoder.finish(); - - if (serviceMessenger != null) { - Message message = new Message(); - message.what = EventTrigger.CAMERA_VIDEO; - message.getData().putString(MonitorService.KEY_PATH, videoFile.getAbsolutePath()); - try { - serviceMessenger.send(message); - } catch (RemoteException e) { - e.printStackTrace(); - } - } - } catch (IOException e) { - e.printStackTrace(); - } - - } - - private void processNewFrame (byte[] data, Size size) - { - if (data != null && size != null) { - int width = size.getWidth(); - int height = size.getHeight(); - - motionDetector.detect( - lastPic, - data, - width, - height); - - lastPic = data; - } - } - - - private synchronized boolean recordVideo() { - - if (doingVideoProcessing) - return false; - String ts1 = new SimpleDateFormat(Utils.DATE_TIME_PATTERN, - Locale.getDefault()).format(new Date()); - File fileStoragePath = new File(Environment.getExternalStorageDirectory(),prefs.getDefaultMediaStoragePath()); - fileStoragePath.mkdirs(); - - videoFile = new File(fileStoragePath, ts1 + ".mp4"); - - try { - encoder = AndroidSequenceEncoder.createSequenceEncoder(videoFile,5); - - } catch (IOException e) { - e.printStackTrace(); - } - - mtxVideoRotate = new Matrix(); - - if (cameraView.getFacing() == Facing.FRONT) { - mtxVideoRotate.postRotate(-cameraView.getRotation()); - mtxVideoRotate.postScale(-1, 1, cameraView.getWidth() / 2, cameraView.getHeight() / 2); - } - else - mtxVideoRotate.postRotate(cameraView.getRotation()); - - doingVideoProcessing = true; - - int seconds = prefs.getMonitoringTime() * 1000; - updateHandler.postDelayed(() -> { - doingVideoProcessing = false; - finishVideoEncoding(); - }, seconds); - - return true; - } - - - public synchronized void stopCamera () 
- { - if (cameraView != null) { - cameraView.close(); - } - } - - - public void destroy () - { - if (mConnection != null) { - this.context.unbindService(mConnection); - mConnection = null; - } - stopCamera(); - } - - public int getCorrectCameraOrientation(Facing facing, int orientation) { - - int rotation = context.getWindowManager().getDefaultDisplay().getRotation(); - int degrees = 0; - - switch(rotation){ - case Surface.ROTATION_0: - degrees = 0; - break; - - case Surface.ROTATION_90: - degrees = 90; - break; - - case Surface.ROTATION_180: - degrees = 180; - break; - - case Surface.ROTATION_270: - degrees = 270; - break; - - } - - int result; - if(facing == Facing.FRONT){ - result = (orientation + degrees) % 360; - result = (360 - result) % 360; - }else{ - result = (orientation - degrees + 360) % 360; - } - - return result; - } - - public boolean doingVideoProcessing () - { - return doingVideoProcessing; - } - -} diff --git a/src/main/java/org/havenapp/main/ui/MicrophoneConfigureActivity.java b/src/main/java/org/havenapp/main/ui/MicrophoneConfigureActivity.java index 0ac4d231..70118ef5 100644 --- a/src/main/java/org/havenapp/main/ui/MicrophoneConfigureActivity.java +++ b/src/main/java/org/havenapp/main/ui/MicrophoneConfigureActivity.java @@ -192,7 +192,7 @@ public void onRequestPermissionsResult(int requestCode, String[] permissions, in protected void onDestroy() { super.onDestroy(); if (microphone != null) - microphone.cancel(true); + microphone.cancelTask(); } diff --git a/src/main/java/org/havenapp/main/usecase/MotionAnalyser.kt b/src/main/java/org/havenapp/main/usecase/MotionAnalyser.kt new file mode 100644 index 00000000..02701228 --- /dev/null +++ b/src/main/java/org/havenapp/main/usecase/MotionAnalyser.kt @@ -0,0 +1,83 @@ +package org.havenapp.main.usecase + +import android.graphics.ImageFormat +import android.os.Looper +import android.util.Log +import android.util.Size +import androidx.annotation.WorkerThread +import androidx.camera.core.ImageAnalysis 
+import androidx.camera.core.ImageProxy +import org.havenapp.main.sensors.motion.MotionDetector +import org.havenapp.main.util.ImageHelper +import kotlin.math.ceil + +private const val DETECTION_INTERVAL_MS = 200L + +class MotionAnalyser( + frameFormat: Int, + private val frameSize: Size, + private val motionDetector: MotionDetector +) : ImageAnalysis.Analyzer { + + private var lastFrame: ByteArray? = null + private val buffer: ByteArray + + @Volatile + private var shouldAnalyse = false + + private var analysisTimestamp = 0L + + init { + assert(frameFormat == ImageFormat.YUV_420_888) + val bitsPerPixel = ImageFormat.getBitsPerPixel(frameFormat) + val sizeInBits: Long = frameSize.height * frameSize.width * bitsPerPixel.toLong() + val bufferSize = ceil(sizeInBits / 8.0).toInt() + buffer = ByteArray(bufferSize) + } + + @WorkerThread + override fun analyze(imageProxy: ImageProxy) { + assert(!isMainThread()) + + if (!shouldAnalyse) { + imageProxy.close() + return + } + + val now = System.currentTimeMillis() + if (analysisTimestamp + DETECTION_INTERVAL_MS > now) { + imageProxy.close() + Log.i(TAG, "Ignoring event due to detection interval policy") + return + } + + val image = imageProxy.image ?: kotlin.run { + imageProxy.close() + return + } + + analysisTimestamp = now + ImageHelper.convertToNV21(image, buffer) + lastFrame?.let { prevFrame -> + motionDetector.detect(prevFrame, buffer, frameSize.width, frameSize.height) + buffer.copyInto(prevFrame) + } ?: kotlin.run { + lastFrame = ByteArray(buffer.size) + buffer.copyInto(lastFrame!!) 
+ } + + imageProxy.close() + } + + fun setAnalyze(analyze: Boolean) { + shouldAnalyse = analyze + } + + private fun isMainThread(): Boolean { + return Looper.getMainLooper().thread === Thread.currentThread() + } + + companion object { + private val TAG = MotionAnalyser::class.java.simpleName + } +} diff --git a/src/main/java/org/havenapp/main/util/CameraHelper.kt b/src/main/java/org/havenapp/main/util/CameraHelper.kt new file mode 100644 index 00000000..ae076a1a --- /dev/null +++ b/src/main/java/org/havenapp/main/util/CameraHelper.kt @@ -0,0 +1,174 @@ +package org.havenapp.main.util + +import android.hardware.camera2.CameraCharacteristics +import android.hardware.camera2.CameraMetadata +import android.os.Build +import androidx.annotation.RequiresApi + +fun CameraCharacteristics.isLimitedLevelDevice(): Boolean = + (this[CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL] == + CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) + +fun CameraCharacteristics.isFullLevel(): Boolean = + (this[CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL] == + CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL) + +fun CameraCharacteristics.isLegacyDevice(): Boolean = + (this[CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL] == + CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) + +@RequiresApi(Build.VERSION_CODES.N) +fun CameraCharacteristics.isLevel3(): Boolean = + (this[CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL] == + CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3) + +@RequiresApi(Build.VERSION_CODES.P) +fun CameraCharacteristics.isLevelExternal(): Boolean = + (this[CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL] == + CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL) + +fun CameraCharacteristics.checkGuarantee(outputList: List): Boolean { + when { + outputList.isEmpty() -> { + return false + } + (Build.VERSION.SDK_INT >= 24) && isLevel3() -> { + return checkGuaranteeForFull(outputList) + } + (Build.VERSION.SDK_INT >= 28) && isLevelExternal() -> { + 
return checkGuaranteeForLimited(outputList) + } + isLimitedLevelDevice() -> { + return checkGuaranteeForLimited(outputList) + } + isLegacyDevice() -> { + return checkGuaranteeForLegacy(outputList) + } + isFullLevel() -> { + return checkGuaranteeForFull(outputList) + } + } + return false +} + +private fun CameraCharacteristics.checkGuaranteeForLegacy(outputList: List): Boolean { + return when (outputList.size) { + 1 -> true + 2, 3 -> { + var b = true + outputList.forEach { + b = b && ((it.size <= OutputSize.PREVIEW && it.type <= OutputType.YUV) || + (it.size <= OutputSize.MAXIMUM && it.type == OutputType.JPEG)) + } + b + } + else -> false + } +} + +private fun CameraCharacteristics.checkGuaranteeForLimited(outputList: List): Boolean { + return when (outputList.size) { + 1 -> true + 2 -> { + outputList.sortedBy { it.size } + (outputList[0].size <= OutputSize.PREVIEW && outputList[0].type <= OutputType.YUV) && + (outputList[1].size <= OutputSize.RECORD && outputList[1].type <= OutputType.YUV) + } + 3 -> { + outputList.sortedBy { it.size } + when { + outputList[2].size <= OutputSize.RECORD -> { + outputList[2].type == OutputType.JPEG && + (outputList[0].size <= OutputSize.PREVIEW && outputList[0].type == OutputType.PRIV) && + (outputList[1].size <= OutputSize.RECORD && outputList[1].type <= OutputType.YUV) + } + outputList[2].size == OutputSize.MAXIMUM -> { + outputList[2].type == OutputType.JPEG && + (outputList[0].size <= OutputSize.PREVIEW && outputList[0].type == OutputType.YUV) && + (outputList[1].size <= OutputSize.PREVIEW && outputList[1].type == OutputType.YUV) + } + else -> false + } + } + else -> false + } +} + +private fun CameraCharacteristics.checkGuaranteeForFull(outputList: List): Boolean { + return when (outputList.size) { + 1 -> true + 2 -> { + outputList.sortedBy { it.size } + (outputList[0].size <= OutputSize.PREVIEW && outputList[0].type <= OutputType.YUV) && + (outputList[1].size <= OutputSize.MAXIMUM && outputList[1].type <= OutputType.YUV) + } + 
3 -> { + outputList.sortedBy { it.size } + when (outputList[2].type) { + OutputType.JPEG -> { + (outputList[0].size <= OutputSize.PREVIEW && outputList[0].type == OutputType.PRIV) && + (outputList[1].size == OutputSize.PREVIEW && outputList[1].type == OutputType.PRIV) + } + OutputType.YUV -> { + (outputList[0].size == OutputSize.S640x480 && outputList[0].type == OutputType.YUV) && + (outputList[1].size <= OutputSize.PREVIEW && outputList[1].type <= OutputType.YUV) + } + else -> { + false + } + } + } + else -> false + } +} + +data class OutputCharacteristics( + val type: OutputType, + val size: OutputSize +) + +enum class OutputType { + /** + * any target whose available sizes are found using + * [android.hardware.camera2.params.StreamConfigurationMap.getOutputSizes] + * with no direct application-visible format + */ + PRIV, + + /** + * target Surface using the ImageFormat.YUV_420_888 format + */ + YUV, + + /** + * refers to the ImageFormat.JPEG format + */ + JPEG, + + /** + * refers to the ImageFormat.RAW_SENSOR format + */ + RAW +} + +enum class OutputSize { + S640x480, + + /** + * refers to the best size match to the device's screen resolution, + * or to 1080p (1920x1080), whichever is smaller. + */ + PREVIEW, + + /** + * refers to the camera device's maximum supported recording resolution, + * as determined by [android.media.CamcorderProfile] + */ + RECORD, + + /** + * refers to the camera device's maximum output resolution for that format or target + * from [android.hardware.camera2.params.StreamConfigurationMap.getOutputSizes] + */ + MAXIMUM +} diff --git a/src/main/java/org/havenapp/main/util/ImageHelper.kt b/src/main/java/org/havenapp/main/util/ImageHelper.kt new file mode 100644 index 00000000..02ae2b21 --- /dev/null +++ b/src/main/java/org/havenapp/main/util/ImageHelper.kt @@ -0,0 +1,82 @@ +package org.havenapp.main.util + +import android.graphics.ImageFormat +import android.media.Image + +/** + * Conversions for [android.media.Image]s into byte arrays. 
+ * + * Thanks to com.otaliastudios:cameraview:2.4.0 + */ +object ImageHelper { + /** + * From https://stackoverflow.com/a/52740776/4288782 . + * The result array should have a size that is at least 3/2 * w * h. + * This is correctly computed by [com.otaliastudios.cameraview.frame.FrameManager]. + * + * @param image input image + * @param result output array + */ + fun convertToNV21(image: Image, result: ByteArray) { + check(image.format == ImageFormat.YUV_420_888) { "CAn only convert from YUV_420_888." } + val width = image.width + val height = image.height + val ySize = width * height + val uvSize = width * height / 4 + val yBuffer = image.planes[0].buffer // Y + val uBuffer = image.planes[1].buffer // U + val vBuffer = image.planes[2].buffer // V + var rowStride = image.planes[0].rowStride + if (image.planes[0].pixelStride != 1) { + throw AssertionError("Something wrong in convertToNV21") + } + var pos = 0 + if (rowStride == width) { // likely + yBuffer[result, 0, ySize] + pos += ySize + } else { + var yBufferPos = width - rowStride // not an actual position + while (pos < ySize) { + yBufferPos += rowStride - width + yBuffer.position(yBufferPos) + yBuffer[result, pos, width] + pos += width + } + } + rowStride = image.planes[2].rowStride + val pixelStride = image.planes[2].pixelStride + if (rowStride != image.planes[1].rowStride) { + throw AssertionError("Something wrong in convertToNV21") + } + if (pixelStride != image.planes[1].pixelStride) { + throw AssertionError("Something wrong in convertToNV21") + } + if (pixelStride == 2 && rowStride == width && uBuffer[0] == vBuffer[1]) { + // maybe V an U planes overlap as per NV21, which means vBuffer[1] + // is alias of uBuffer[0] + val savePixel = vBuffer[1] + vBuffer.put(1, 0.toByte()) + if (uBuffer[0].toInt() == 0) { + vBuffer.put(1, 255.toByte()) + if (uBuffer[0].toInt() == 255) { + vBuffer.put(1, savePixel) + vBuffer[result, ySize, uvSize] + return // shortcut + } + } + + // unfortunately, the check failed. 
We must save U and V pixel by pixel + vBuffer.put(1, savePixel) + } + + // other optimizations could check if (pixelStride == 1) or (pixelStride == 2), + // but performance gain would be less significant + for (row in 0 until height / 2) { + for (col in 0 until width / 2) { + val vuPos = col * pixelStride + row * rowStride + result[pos++] = vBuffer[vuPos] + result[pos++] = uBuffer[vuPos] + } + } + } +} diff --git a/src/main/res/layout/activity_camera_configure.xml b/src/main/res/layout/activity_camera_configure.xml index 85672da9..297cfebf 100644 --- a/src/main/res/layout/activity_camera_configure.xml +++ b/src/main/res/layout/activity_camera_configure.xml @@ -1,6 +1,7 @@ + tools:text="15% motion detected" /> - - - - - - + android:layout_height="match_parent" /> diff --git a/src/main/res/values/strings.xml b/src/main/res/values/strings.xml index 3454d225..fc06dd79 100644 --- a/src/main/res/values/strings.xml +++ b/src/main/res/values/strings.xml @@ -159,4 +159,8 @@ Reset Signal Configuration? This will clear out all Signal configurations. You will have to register and verify again to use Signal services in the future. Are you sure? Clean deleted logs from device + Finishing up. Please wait. + %1$s%% motion detected + Simultaneous Image Monitoring + Keep on image monitoring along with video monitoring. This will stop the camera preview \ No newline at end of file diff --git a/src/main/res/xml/settings.xml b/src/main/res/xml/settings.xml index 6e733e43..3ffc078e 100644 --- a/src/main/res/xml/settings.xml +++ b/src/main/res/xml/settings.xml @@ -37,6 +37,13 @@ android:defaultValue="false" android:key="@string/video_active_preference_key" android:title="@string/video_monitoring"/> + +