diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..cb28079 --- /dev/null +++ b/.gitignore @@ -0,0 +1,87 @@ +# Built application files +*.apk +*.aar +*.ap_ +*.aab + +# Files for the ART/Dalvik VM +*.dex + +# Java class files +*.class + +# Generated files +bin/ +gen/ +out/ + +release/ +/app/release/ + +# Gradle files +.gradle/ +build/ + +# Local configuration file (sdk path, etc) +local.properties + +# Proguard folder generated by Eclipse +proguard/ + +# Log Files +*.log + +# Android Studio Navigation editor temp files +.navigation/ + +# Android Studio captures folder +captures/ + +# IntelliJ +*.iml +.idea/ + +# Keystore files +*.jks +*.keystore + +# External native build folder generated in Android Studio 2.2 and later +.externalNativeBuild +.cxx/ + +# Google Services (e.g. APIs or Firebase) +# google-services.json + +# Freeline +freeline.py +freeline/ +freeline_project_description.json + +# fastlane +fastlane/report.xml +fastlane/Preview.html +fastlane/screenshots +fastlane/test_output +fastlane/readme.md + +# Version control +vcs.xml + +# lint +lint/intermediates/ +lint/generated/ +lint/outputs/ +lint/tmp/ +# lint/reports/ + +# Android Profiling +*.hprof + +# OS-specific files +.DS_Store +.DS_Store? 
+._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db diff --git a/README.md b/README.md new file mode 100644 index 0000000..d02d0a3 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +VIRec diff --git a/app/build.gradle b/app/build.gradle new file mode 100644 index 0000000..510a1d9 --- /dev/null +++ b/app/build.gradle @@ -0,0 +1,34 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion 30 + defaultConfig { + applicationId "io.a3dv.VIRec" + minSdkVersion 23 + targetSdkVersion 30 + versionCode 1 + versionName "1.0" + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + debug { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } +} + +dependencies { + implementation 'com.jakewharton.timber:timber:4.7.1' + implementation fileTree(include: ['*.jar'], dir: 'libs') + implementation 'androidx.constraintlayout:constraintlayout:2.1.0' + implementation "androidx.preference:preference:1.1.1" + implementation 'androidx.appcompat:appcompat:1.3.1' +} diff --git a/app/proguard-rules.pro b/app/proguard-rules.pro new file mode 100644 index 0000000..f1b4245 --- /dev/null +++ b/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. 
+#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000..5023137 --- /dev/null +++ b/app/src/main/AndroidManifest.xml @@ -0,0 +1,52 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/ic_launcher-web.png b/app/src/main/ic_launcher-web.png new file mode 100644 index 0000000..4706fa8 Binary files /dev/null and b/app/src/main/ic_launcher-web.png differ diff --git a/app/src/main/java/io/a3dv/VIRec/AboutActivity.java b/app/src/main/java/io/a3dv/VIRec/AboutActivity.java new file mode 100644 index 0000000..d47c1ee --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/AboutActivity.java @@ -0,0 +1,27 @@ +package io.a3dv.VIRec; + +import android.os.Bundle; +import android.text.Spanned; +import android.text.method.LinkMovementMethod; +import android.widget.TextView; + +import androidx.appcompat.app.AppCompatActivity; + +public class AboutActivity extends AppCompatActivity { + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.about_activity); + + TextView hyperlink = findViewById(R.id.linkTextView); + String linkText = getResources().getString(R.string.link_foreword); + Spanned text = FileHelper.fromHtml(linkText + " " + + "GitHub."); + hyperlink.setMovementMethod(LinkMovementMethod.getInstance()); + hyperlink.setText(text); + + TextView versionName = findViewById(R.id.versionText); + versionName.setText(getString(R.string.versionName, BuildConfig.VERSION_NAME)); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/AspectFrameLayout.java b/app/src/main/java/io/a3dv/VIRec/AspectFrameLayout.java new file mode 100644 index 0000000..332ceab --- /dev/null +++ 
b/app/src/main/java/io/a3dv/VIRec/AspectFrameLayout.java @@ -0,0 +1,71 @@ +package io.a3dv.VIRec; + +import android.content.Context; +import android.util.AttributeSet; +import android.widget.FrameLayout; + +import timber.log.Timber; + +/** + * Layout that adjusts to maintain a specific aspect ratio. + */ +public class AspectFrameLayout extends FrameLayout { + private double mTargetAspect = -1.0; // initially use default window size + + public AspectFrameLayout(Context context) { + super(context); + } + + public AspectFrameLayout(Context context, AttributeSet attrs) { + super(context, attrs); + } + + /** + * Sets the desired aspect ratio. The value is width / height. + */ + public void setAspectRatio(double aspectRatio) { + if (aspectRatio < 0) { + throw new IllegalArgumentException(); + } + Timber.d("Setting aspect ratio to %f (was %f)", aspectRatio, mTargetAspect); + if (mTargetAspect != aspectRatio) { + mTargetAspect = aspectRatio; + requestLayout(); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + // Target aspect ratio will be < 0 if it hasn't been set yet. In that case, + // we just use whatever we've been handed. 
+ if (mTargetAspect > 0) { + int initialWidth = MeasureSpec.getSize(widthMeasureSpec); + int initialHeight = MeasureSpec.getSize(heightMeasureSpec); + + // factor the padding out + int horizPadding = getPaddingLeft() + getPaddingRight(); + int vertPadding = getPaddingTop() + getPaddingBottom(); + initialWidth -= horizPadding; + initialHeight -= vertPadding; + + double viewAspectRatio = (double) initialWidth / initialHeight; + double aspectDiff = mTargetAspect / viewAspectRatio - 1; + + if (Math.abs(aspectDiff) >= 0.01) { + if (aspectDiff > 0) { + // limited by narrow width; restrict height + initialHeight = (int) (initialWidth / mTargetAspect); + } else { + // limited by short height; restrict width + initialWidth = (int) (initialHeight * mTargetAspect); + } + initialWidth += horizPadding; + initialHeight += vertPadding; + widthMeasureSpec = MeasureSpec.makeMeasureSpec(initialWidth, MeasureSpec.EXACTLY); + heightMeasureSpec = MeasureSpec.makeMeasureSpec(initialHeight, MeasureSpec.EXACTLY); + } + } + + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/Camera2Proxy.java b/app/src/main/java/io/a3dv/VIRec/Camera2Proxy.java new file mode 100644 index 0000000..32b747b --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/Camera2Proxy.java @@ -0,0 +1,565 @@ +package io.a3dv.VIRec; + +import android.annotation.SuppressLint; +import android.app.Activity; +import android.content.Context; +import android.content.SharedPreferences; +import android.graphics.Rect; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import 
android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.MeteringRectangle; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.media.ImageReader; +import android.media.MediaRecorder; + +import android.os.Handler; +import android.os.HandlerThread; +import androidx.preference.PreferenceManager; +import androidx.annotation.NonNull; + +import android.util.Size; +import android.util.SizeF; +import android.view.Surface; + +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.ArrayList; + +import java.util.List; + +import timber.log.Timber; + +public class Camera2Proxy { + private final Activity mActivity; + private static SharedPreferences mSharedPreferences; + private String mCameraIdStr = ""; + private final boolean mSecondCamera; + private Size mPreviewSize; + private Size mVideoSize; + private final CameraManager mCameraManager; + private CameraDevice mCameraDevice; + private CameraCaptureSession mCaptureSession; + private CaptureRequest.Builder mPreviewRequestBuilder; + private Rect sensorArraySize; + private Integer mTimeSourceValue; + + private CaptureRequest mPreviewRequest; + private Handler mBackgroundHandler; + private HandlerThread mBackgroundThread; + private ImageReader mImageReader; + private Surface mPreviewSurface; + private SurfaceTexture mPreviewSurfaceTexture = null; + + /** + * Camera state: Showing camera preview. + */ + private static final int STATE_PREVIEW = 0; + + /** + * Wait until the CONTROL_AF_MODE is in auto. + */ + private static final int STATE_WAITING_AUTO = 1; + + /** + * Trigger auto focus algorithm. + */ + private static final int STATE_TRIGGER_AUTO = 2; + + /** + * Camera state: Waiting for the focus to be locked. + */ + private static final int STATE_WAITING_LOCK = 3; + + /** + * Camera state: Focus distance is locked. 
+ */ + private static final int STATE_FOCUS_LOCKED = 4; + /** + * The current state of camera state for taking pictures. + * + * @see #mFocusCaptureCallback + */ + private int mState = STATE_PREVIEW; + + private BufferedWriter mFrameMetadataWriter = null; + + private volatile boolean mRecordingMetadata = false; + + private final FocalLengthHelper mFocalLengthHelper = new FocalLengthHelper(); + + private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() { + @Override + public void onOpened(@NonNull CameraDevice camera) { + Timber.d("onOpened"); + mCameraDevice = camera; + initPreviewRequest(); + } + + @Override + public void onDisconnected(@NonNull CameraDevice camera) { + Timber.d("onDisconnected"); + releaseCamera(); + } + + @Override + public void onError(@NonNull CameraDevice camera, int error) { + Timber.w("Camera Open failed with error %d", error); + releaseCamera(); + } + }; + + public Integer getmTimeSourceValue() { + return mTimeSourceValue; + } + + public Size getmVideoSize() { + return mVideoSize; + } + + public void startRecordingCaptureResult(String captureResultFile) { + try { + if (mFrameMetadataWriter != null) { + try { + mFrameMetadataWriter.flush(); + mFrameMetadataWriter.close(); + Timber.d("Flushing results!"); + } catch (IOException err) { + Timber.e(err, "IOException in closing an earlier frameMetadataWriter."); + } + } + mFrameMetadataWriter = new BufferedWriter( + new FileWriter(captureResultFile, true)); + String header = "Timestamp[nanosec],fx[px],fy[px],Frame No.," + + "Exposure time[nanosec],Sensor frame duration[nanosec]," + + "Frame readout time[nanosec]," + + "ISO,Focal length,Focus distance,AF mode,Unix time[nanosec]"; + + mFrameMetadataWriter.write(header + "\n"); + mRecordingMetadata = true; + } catch (IOException err) { + Timber.e(err, "IOException in opening frameMetadataWriter at %s", + captureResultFile); + } + } + +// public void resumeRecordingCaptureResult() { +// mRecordingMetadata = true; 
+// } +// +// public void pauseRecordingCaptureResult() { +// mRecordingMetadata = false; +// } + + public void stopRecordingCaptureResult() { + if (mRecordingMetadata) { + mRecordingMetadata = false; + } + if (mFrameMetadataWriter != null) { + try { + mFrameMetadataWriter.flush(); + mFrameMetadataWriter.close(); + } catch (IOException err) { + Timber.e(err, "IOException in closing frameMetadataWriter."); + } + mFrameMetadataWriter = null; + } + } + + public Camera2Proxy(Activity activity, boolean secondCamera) { + mActivity = activity; + mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(mActivity); + mCameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE); + mSecondCamera = secondCamera; // If it's the second camera + } + + public Size configureCamera() { + try { + if (mSecondCamera) { + mCameraIdStr = mSharedPreferences.getString("prefCamera2", "1"); + } else { + mCameraIdStr = mSharedPreferences.getString("prefCamera", "0"); + } + + CameraCharacteristics mCameraCharacteristics = mCameraManager.getCameraCharacteristics(mCameraIdStr); + + String imageSize = mSharedPreferences.getString("prefSizeRaw", + DesiredCameraSetting.mDesiredFrameSize); + int width = Integer.parseInt(imageSize.substring(0, imageSize.lastIndexOf("x"))); + int height = Integer.parseInt(imageSize.substring(imageSize.lastIndexOf("x") + 1)); + + sensorArraySize = mCameraCharacteristics.get( + CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + mTimeSourceValue = mCameraCharacteristics.get( + CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE); + + StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics + .SCALER_STREAM_CONFIGURATION_MAP); + + Size[] videoSizeChoices = map.getOutputSizes(MediaRecorder.class); + mVideoSize = CameraUtils.chooseVideoSize(videoSizeChoices, width, height, width); + + mFocalLengthHelper.setLensParams(mCameraCharacteristics); + mFocalLengthHelper.setmImageSize(mVideoSize); + + mPreviewSize = 
CameraUtils.chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), + width, height, mVideoSize); + Timber.d("Video size %s preview size %s.", + mVideoSize.toString(), mPreviewSize.toString()); + + } catch (CameraAccessException e) { + Timber.e(e); + } + return mPreviewSize; + } + + @SuppressLint("MissingPermission") + public void openCamera() { + Timber.v("openCamera"); + startBackgroundThread(); + if (mCameraIdStr.isEmpty()) { + configureCamera(); + } + try { + mCameraManager.openCamera(mCameraIdStr, mStateCallback, mBackgroundHandler); + } catch (CameraAccessException e) { + Timber.e(e); + } + } + + public void releaseCamera() { + Timber.v("releaseCamera"); + if (null != mCaptureSession) { + mCaptureSession.close(); + mCaptureSession = null; + } + if (mCameraDevice != null) { + mCameraDevice.close(); + mCameraDevice = null; + } + if (mImageReader != null) { + mImageReader.close(); + mImageReader = null; + } + mPreviewSurfaceTexture = null; + mCameraIdStr = ""; + stopRecordingCaptureResult(); + stopBackgroundThread(); + } + + public void setPreviewSurfaceTexture(SurfaceTexture surfaceTexture) { + mPreviewSurfaceTexture = surfaceTexture; + } + + private static class NumExpoIso { + public Long mNumber; + public Long mExposureNanos; + public Integer mIso; + + public NumExpoIso(Long number, Long expoNanos, Integer iso) { + mNumber = number; + mExposureNanos = expoNanos; + mIso = iso; + } + } + + private final int kMaxExpoSamples = 10; + private final ArrayList expoStats = new ArrayList<>(kMaxExpoSamples); + + private void setExposureAndIso() { + long exposureNanos = DesiredCameraSetting.mDesiredExposureTime; + long desiredIsoL = 30L * 30000000L / exposureNanos; + Integer desiredIso = (int) desiredIsoL; + if (!expoStats.isEmpty()) { + int index = expoStats.size() / 2; + Long actualExpo = expoStats.get(index).mExposureNanos; + Integer actualIso = expoStats.get(index).mIso; + if (actualExpo != null && actualIso != null) { + if (actualExpo <= exposureNanos) { + 
exposureNanos = actualExpo; + desiredIso = actualIso; + } else { + desiredIsoL = actualIso * actualExpo / exposureNanos; + desiredIso = (int) desiredIsoL; + } + } // else may occur on an emulated device. + } + + boolean manualControl = mSharedPreferences.getBoolean("switchManualControl", false); + if (manualControl) { + float exposureTimeMs = (float) exposureNanos / 1e6f; + String exposureTimeMsStr = mSharedPreferences.getString( + "prefExposureTime", String.valueOf(exposureTimeMs)); + exposureNanos = (long) (Float.parseFloat(exposureTimeMsStr) * 1e6f); + String desiredIsoStr = mSharedPreferences.getString("prefISO", String.valueOf(desiredIso)); + desiredIso = Integer.parseInt(desiredIsoStr); + } + + // fix exposure + mPreviewRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF); + + mPreviewRequestBuilder.set( + CaptureRequest.SENSOR_EXPOSURE_TIME, exposureNanos); + Timber.d("Exposure time set to %d", exposureNanos); + + // fix ISO + mPreviewRequestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, desiredIso); + Timber.d("ISO set to %d", desiredIso); + } + + private void initPreviewRequest() { + try { + mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + // Set control elements, we want auto white balance + mPreviewRequestBuilder.set( + CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO); + mPreviewRequestBuilder.set( + CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO); + + // We disable customizing focus distance by user input because + // it is less flexible than tap to focus. 
+// boolean manualControl = mSharedPreferences.getBoolean("switchManualControl", false); +// if (manualControl) { +// String focus = mSharedPreferences.getString("prefFocusDistance", "5.0"); +// Float focusDistance = Float.parseFloat(focus); +// mPreviewRequestBuilder.set( +// CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_OFF); +// mPreviewRequestBuilder.set( +// CaptureRequest.LENS_FOCUS_DISTANCE, focusDistance); +// Timber.d("Focus distance set to %f", focusDistance); +// } + + List surfaces = new ArrayList<>(); + + if (mPreviewSurfaceTexture != null && mPreviewSurface == null) { // use texture view + mPreviewSurfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), + mPreviewSize.getHeight()); + mPreviewSurface = new Surface(mPreviewSurfaceTexture); + } + surfaces.add(mPreviewSurface); + mPreviewRequestBuilder.addTarget(mPreviewSurface); + + mCameraDevice.createCaptureSession(surfaces, + new CameraCaptureSession.StateCallback() { + + @Override + public void onConfigured(@NonNull CameraCaptureSession session) { + mCaptureSession = session; + mPreviewRequest = mPreviewRequestBuilder.build(); + startPreview(); + } + + @Override + public void onConfigureFailed(@NonNull CameraCaptureSession session) { + Timber.w("ConfigureFailed. session: mCaptureSession"); + } + }, mBackgroundHandler); + } catch (CameraAccessException e) { + Timber.e(e); + } + } + + public void startPreview() { + Timber.v("startPreview"); + if (mCaptureSession == null || mPreviewRequestBuilder == null) { + Timber.w("startPreview: mCaptureSession or mPreviewRequestBuilder is null"); + return; + } + try { + mCaptureSession.setRepeatingRequest( + mPreviewRequest, mFocusCaptureCallback, mBackgroundHandler); + } catch (CameraAccessException e) { + Timber.e(e); + } + } + + /** + * A {@link CameraCaptureSession.CaptureCallback} that handles events related to tap to focus. 
+ * https://stackoverflow.com/questions/42127464/how-to-lock-focus-in-camera2-api-android + */ + private final CameraCaptureSession.CaptureCallback mFocusCaptureCallback + = new CameraCaptureSession.CaptureCallback() { + + private void process(CaptureResult result) { + switch (mState) { + case STATE_PREVIEW: { + // We have nothing to do when the camera preview is working normally. + break; + } + case STATE_WAITING_AUTO: { + Integer afMode = result.get(CaptureResult.CONTROL_AF_MODE); + if (afMode != null && afMode == CaptureResult.CONTROL_AF_MODE_AUTO) { + mState = STATE_TRIGGER_AUTO; + + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, + CaptureRequest.CONTROL_AF_MODE_AUTO); + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, + CameraMetadata.CONTROL_AF_TRIGGER_START); + try { + mCaptureSession.capture( + mPreviewRequestBuilder.build(), + mFocusCaptureCallback, mBackgroundHandler); + } catch (CameraAccessException e) { + Timber.e(e); + } + } + break; + } + case STATE_TRIGGER_AUTO: { + mState = STATE_WAITING_LOCK; + + setExposureAndIso(); + + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, + CaptureRequest.CONTROL_AF_MODE_AUTO); + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, + CameraMetadata.CONTROL_AF_TRIGGER_IDLE); + try { + mCaptureSession.setRepeatingRequest( + mPreviewRequestBuilder.build(), + mFocusCaptureCallback, mBackgroundHandler); + } catch (CameraAccessException e) { + Timber.e(e); + } + Timber.d("Focus trigger auto"); + break; + } + case STATE_WAITING_LOCK: { + Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); + if (afState == null) { + mState = STATE_FOCUS_LOCKED; + } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState || + CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) { + mState = STATE_FOCUS_LOCKED; + Timber.d("Focus locked after waiting lock"); + } + break; + } + } + } + + @Override + public void onCaptureProgressed(@NonNull CameraCaptureSession session, + 
@NonNull CaptureRequest request, + @NonNull CaptureResult partialResult) { + process(partialResult); + } + + @Override + public void onCaptureCompleted(@NonNull CameraCaptureSession session, + @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + long unixTime = System.currentTimeMillis(); + process(result); + + Long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP); + Long number = result.getFrameNumber(); + Long exposureTimeNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); + + Long frmDurationNs = result.get(CaptureResult.SENSOR_FRAME_DURATION); + Long frmReadoutNs = result.get(CaptureResult.SENSOR_ROLLING_SHUTTER_SKEW); + Integer iso = result.get(CaptureResult.SENSOR_SENSITIVITY); + if (expoStats.size() > kMaxExpoSamples) { + expoStats.subList(0, kMaxExpoSamples / 2).clear(); + } + expoStats.add(new NumExpoIso(number, exposureTimeNs, iso)); + + Float fl = result.get(CaptureResult.LENS_FOCAL_LENGTH); + + Float fd = result.get(CaptureResult.LENS_FOCUS_DISTANCE); + + Integer afMode = result.get(CaptureResult.CONTROL_AF_MODE); + + Rect rect = result.get(CaptureResult.SCALER_CROP_REGION); + mFocalLengthHelper.setmFocalLength(fl); + mFocalLengthHelper.setmFocusDistance(fd); + mFocalLengthHelper.setmCropRegion(rect); + SizeF sz_focal_length = mFocalLengthHelper.getFocalLengthPixel(); + String delimiter = ","; + String frame_info = timestamp + + delimiter + sz_focal_length.getWidth() + + delimiter + sz_focal_length.getHeight() + + delimiter + number + + delimiter + exposureTimeNs + + delimiter + frmDurationNs + + delimiter + frmReadoutNs + + delimiter + iso + + delimiter + fl + + delimiter + fd + + delimiter + afMode + + delimiter + unixTime + "000000"; + if (mRecordingMetadata) { + try { + mFrameMetadataWriter.write(frame_info + "\n"); + } catch (IOException err) { + Timber.e(err, "Error writing captureResult"); + } + } + ((CameraActivityBase) mActivity).updateCaptureResultPanel( + sz_focal_length.getWidth(), exposureTimeNs, afMode, 
mSecondCamera); + } + + }; + + + void changeManualFocusPoint(ManualFocusConfig focusConfig) { + float eventX = focusConfig.mEventX; + float eventY = focusConfig.mEventY; + int viewWidth = focusConfig.mViewWidth; + int viewHeight = focusConfig.mViewHeight; + + final int y = (int) ((eventX / (float) viewWidth) * (float) sensorArraySize.height()); + final int x = (int) ((eventY / (float) viewHeight) * (float) sensorArraySize.width()); + final int halfTouchWidth = 400; + final int halfTouchHeight = 400; + MeteringRectangle focusAreaTouch = new MeteringRectangle(Math.max(x - halfTouchWidth, 0), + Math.max(y - halfTouchHeight, 0), + halfTouchWidth * 2, + halfTouchHeight * 2, + MeteringRectangle.METERING_WEIGHT_MAX - 1); + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, + CameraMetadata.CONTROL_AF_MODE_AUTO); + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, + new MeteringRectangle[]{focusAreaTouch}); + try { + mState = STATE_WAITING_AUTO; + mCaptureSession.setRepeatingRequest( + mPreviewRequestBuilder.build(), mFocusCaptureCallback, null); + } catch (CameraAccessException e) { + Timber.e(e); + } + } + + private void startBackgroundThread() { + if (mBackgroundThread == null || mBackgroundHandler == null) { + Timber.v("startBackgroundThread"); + mBackgroundThread = new HandlerThread("CameraBackground"); + mBackgroundThread.start(); + mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); + } + } + + private void stopBackgroundThread() { + Timber.v("stopBackgroundThread"); + try { + if (mBackgroundThread != null) { + mBackgroundThread.quitSafely(); + mBackgroundThread.join(); + } + mBackgroundThread = null; + mBackgroundHandler = null; + } catch (InterruptedException e) { + Timber.e(e); + } + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/CameraActivity.java b/app/src/main/java/io/a3dv/VIRec/CameraActivity.java new file mode 100644 index 0000000..66fe9f5 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/CameraActivity.java @@ -0,0 
+1,859 @@ +package io.a3dv.VIRec; + +import android.annotation.SuppressLint; +import android.app.Activity; +import android.content.Intent; +import android.content.pm.ActivityInfo; +import android.graphics.SurfaceTexture; +import android.opengl.EGL14; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.os.Build; +import android.os.Bundle; +import android.os.Environment; +import android.os.Handler; +import android.os.Message; +import android.util.Size; +import android.view.Display; +import android.view.Gravity; +import android.view.MenuItem; +import android.view.Surface; +import android.view.View; +import android.view.WindowManager; +import android.widget.AdapterView; +import android.widget.AdapterView.OnItemSelectedListener; +import android.widget.ArrayAdapter; +import android.widget.ImageButton; +import android.widget.PopupMenu; +import android.widget.Spinner; +import android.widget.TextView; + +import androidx.annotation.RequiresApi; + +import java.io.File; +import java.lang.ref.WeakReference; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +import io.a3dv.VIRec.gles.FullFrameRect; +import io.a3dv.VIRec.gles.Texture2dProgram; +import timber.log.Timber; + +class DesiredCameraSetting { + static final int mDesiredFrameWidth = 1280; + static final int mDesiredFrameHeight = 720; + static final Long mDesiredExposureTime = 5000000L; // nanoseconds + static final String mDesiredFrameSize = mDesiredFrameWidth + "x" + mDesiredFrameHeight; +} + +class CameraActivityBase extends Activity implements SurfaceTexture.OnFrameAvailableListener { + protected static final boolean VERBOSE = false; + + // Camera filters; must match up with cameraFilterNames in strings.xml + static final int FILTER_NONE = 0; + static final int FILTER_BLACK_WHITE = 1; + static final int FILTER_BLUR = 2; + static final int 
FILTER_SHARPEN = 3; + static final int FILTER_EDGE_DETECT = 4; + static final int FILTER_EMBOSS = 5; + + protected TextView mKeyCameraParamsText; + protected TextView mKeyCameraParamsText2; + protected TextView mCaptureResultText; + protected TextView mCaptureResultText2; + + protected int mCameraPreviewWidth, mCameraPreviewHeight; + protected int mVideoFrameWidth, mVideoFrameHeight; + protected int mVideoFrameWidth2, mVideoFrameHeight2; + protected Camera2Proxy mCamera2Proxy = null; + protected Camera2Proxy mCamera2Proxy2 = null; + + protected SampleGLView mGLView; + protected SampleGLView mGLView2; + protected TextureMovieEncoder sVideoEncoder = new TextureMovieEncoder(); + protected TextureMovieEncoder sVideoEncoder2 = new TextureMovieEncoder(); + + /** + * Connects the SurfaceTexture to the Camera preview output, and starts the preview. + */ + public void handleSetSurfaceTexture(SurfaceTexture st) { + st.setOnFrameAvailableListener(this); + + if (mCamera2Proxy != null) { + mCamera2Proxy.setPreviewSurfaceTexture(st); + mCamera2Proxy.openCamera(); + } else { + throw new RuntimeException( + "Try to set surface texture while camera2proxy is null"); + } + } + + public void handleSetSurfaceTexture2(SurfaceTexture st) { + st.setOnFrameAvailableListener(surfaceTexture -> { + if (VERBOSE) Timber.d("ST onFrameAvailable"); + mGLView2.requestRender(); + + final String sFps = String.format(Locale.getDefault(), "%.1f FPS", + sVideoEncoder.mFrameRate); + String previewFacts = "[2] " + mCameraPreviewWidth + "x" + mCameraPreviewHeight + "@" + sFps; + + mKeyCameraParamsText2.setText(previewFacts); + }); + + if (mCamera2Proxy2 != null) { + mCamera2Proxy2.setPreviewSurfaceTexture(st); + mCamera2Proxy2.openCamera(); + } else { + throw new RuntimeException( + "Try to set surface texture while camera2proxy is null"); + } + } + + public Camera2Proxy getmCamera2Proxy() { + if (mCamera2Proxy == null) { + throw new RuntimeException("Get a null Camera2Proxy"); + } + return mCamera2Proxy; 
+ } + + protected String renewOutputDir() { + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss", Locale.US); + String folderName = dateFormat.format(new Date()); + + String dataDir = getExternalFilesDir( + Environment.getDataDirectory().getAbsolutePath()).getAbsolutePath(); + String outputDir = dataDir + File.separator + folderName; + + (new File(outputDir)).mkdirs(); + return outputDir; + } + + // updates mCameraPreviewWidth/Height + protected void setLayoutAspectRatio(Size cameraPreviewSize) { + AspectFrameLayout layout = findViewById(R.id.cameraPreview_afl); + Display display = ((WindowManager) getSystemService(WINDOW_SERVICE)).getDefaultDisplay(); + mCameraPreviewWidth = cameraPreviewSize.getWidth(); + mCameraPreviewHeight = cameraPreviewSize.getHeight(); + if (display.getRotation() == Surface.ROTATION_0) { + layout.setAspectRatio((double) mCameraPreviewHeight / mCameraPreviewWidth); + } else if (display.getRotation() == Surface.ROTATION_180) { + layout.setAspectRatio((double) mCameraPreviewHeight / mCameraPreviewWidth); + } else { + layout.setAspectRatio((double) mCameraPreviewWidth / mCameraPreviewHeight); + } + } + + public void updateCaptureResultPanel( + final Float fl, + final Long exposureTimeNs, final Integer afMode, boolean secondCamera) { + final String sfl = String.format(Locale.getDefault(), "%.3f", fl); + final String sExpoTime = + exposureTimeNs == null ? + "null ms" : + String.format(Locale.getDefault(), "%.2f ms", + exposureTimeNs / 1000000.0); + + final String saf = "AF Mode: " + afMode.toString(); + + if (secondCamera) { + runOnUiThread(() -> mCaptureResultText2.setText(sfl + " " + sExpoTime + " " + saf)); + } else { + runOnUiThread(() -> mCaptureResultText.setText(sfl + " " + sExpoTime + " " + saf)); + } + + } + + @Override + public void onFrameAvailable(SurfaceTexture st) { + // The SurfaceTexture uses this to signal the availability of a new frame. 
The + // thread that "owns" the external texture associated with the SurfaceTexture (which, + // by virtue of the context being shared, *should* be either one) needs to call + // updateTexImage() to latch the buffer. + // + // Once the buffer is latched, the GLSurfaceView thread can signal the encoder thread. + // This feels backward -- we want recording to be prioritized over rendering -- but + // since recording is only enabled some of the time it's easier to do it this way. + // + // Since GLSurfaceView doesn't establish a Looper, this will *probably* execute on + // the main UI thread. Fortunately, requestRender() can be called from any thread, + // so it doesn't really matter. + if (VERBOSE) Timber.d("ST onFrameAvailable"); + mGLView.requestRender(); + + final String sFps = String.format(Locale.getDefault(), "%.1f FPS", + sVideoEncoder.mFrameRate); + String previewFacts = "[1] " + mCameraPreviewWidth + "x" + mCameraPreviewHeight + "@" + sFps; + + mKeyCameraParamsText.setText(previewFacts); + } +} + +public class CameraActivity extends CameraActivityBase + implements PopupMenu.OnMenuItemClickListener { + private CameraSurfaceRenderer mRenderer = null; + private CameraSurfaceRenderer mRenderer2 = null; + private TextView mOutputDirText; + + private CameraHandler mCameraHandler; + private CameraHandler mCameraHandler2; + private boolean mRecordingEnabled; // controls button state + + private IMUManager mImuManager; + private GPSManager mGpsManager; + private TimeBaseManager mTimeBaseManager; + + @SuppressLint("SourceLockedOrientationActivity") + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_PORTRAIT); + setContentView(R.layout.camera_activity); + Spinner spinner = findViewById(R.id.cameraFilter_spinner); + ArrayAdapter adapter = ArrayAdapter.createFromResource(this, + R.array.cameraFilterNames, android.R.layout.simple_spinner_item); + 
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); + + // Apply the adapter to the spinner. + spinner.setAdapter(adapter); + spinner.setOnItemSelectedListener(new OnItemSelectedListener() { + @Override + public void onItemSelected(AdapterView parent, View view, int pos, long id) { + Spinner spinner = (Spinner) parent; + final int filterNum = spinner.getSelectedItemPosition(); + TextView textView = (TextView) parent.getChildAt(0); + textView.setTextColor(0xFFFFFFFF); + textView.setGravity(Gravity.CENTER); + + mGLView.queueEvent(() -> { + // notify the renderer that we want to change the encoder's state + mRenderer.changeFilterMode(filterNum); + }); + + mGLView2.queueEvent(() -> { + // notify the renderer that we want to change the encoder's state + mRenderer2.changeFilterMode(filterNum); + }); + } + + @Override + public void onNothingSelected(AdapterView parent) { + } + }); + } + + @Override + protected void onStart() { + super.onStart(); + mCamera2Proxy = new Camera2Proxy(this, false); + mCamera2Proxy2 = new Camera2Proxy(this, true); + Size previewSize = mCamera2Proxy.configureCamera(); + mCamera2Proxy2.configureCamera(); + setLayoutAspectRatio(previewSize); // updates mCameraPreviewWidth/Height + Size videoSize = mCamera2Proxy.getmVideoSize(); + mVideoFrameWidth = videoSize.getWidth(); + mVideoFrameHeight = videoSize.getHeight(); + + Size videoSize2 = mCamera2Proxy2.getmVideoSize(); + mVideoFrameWidth2 = videoSize2.getWidth(); + mVideoFrameHeight2 = videoSize2.getHeight(); + // Define a handler that receives camera-control messages from other threads. All calls + // to Camera must be made on the same thread. Note we create this before the renderer + // thread, so we know the fully-constructed object will be visible. + mCameraHandler = new CameraHandler(this); + mCameraHandler2 = new CameraHandler(this); + + mRecordingEnabled = sVideoEncoder.isRecording(); + + // Configure the GLSurfaceView. 
This will start the Renderer thread, with an + // appropriate EGL context. + mGLView = findViewById(R.id.cameraPreview_surfaceView); + mGLView2 = findViewById(R.id.cameraPreview_surfaceView2); + + if (mRenderer == null) { + mRenderer = new CameraSurfaceRenderer(mCameraHandler, sVideoEncoder, 0); + mGLView.setEGLContextClientVersion(2); // select GLES 2.0 + mGLView.setRenderer(mRenderer); + mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); + } + + if (mRenderer2 == null) { + mRenderer2 = new CameraSurfaceRenderer(mCameraHandler2, sVideoEncoder2, 1); + mGLView2.setEGLContextClientVersion(2); // select GLES 2.0 + mGLView2.setRenderer(mRenderer2); + mGLView2.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); + } + + mGLView.setTouchListener((event, width, height) -> { + ManualFocusConfig focusConfig = + new ManualFocusConfig(event.getX(), event.getY(), width, height); + Timber.d(focusConfig.toString()); + mCameraHandler.sendMessage( + mCameraHandler.obtainMessage(CameraHandler.MSG_MANUAL_FOCUS, focusConfig)); + }); + + if (mImuManager == null) { + mImuManager = new IMUManager(this); + mTimeBaseManager = new TimeBaseManager(); + } + + if (mGpsManager == null) { + mGpsManager = new GPSManager(this); + mTimeBaseManager = new TimeBaseManager(); + } + + mKeyCameraParamsText = findViewById(R.id.cameraParams_text); + mKeyCameraParamsText2 = findViewById(R.id.cameraParams_text2); + mCaptureResultText = findViewById(R.id.captureResult_text); + mCaptureResultText2 = findViewById(R.id.captureResult_text2); + mOutputDirText = findViewById(R.id.cameraOutputDir_text); + } + + @Override + protected void onResume() { + Timber.d("onResume -- acquiring camera"); + super.onResume(); + Timber.d("Keeping screen on for previewing recording."); + getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); + updateControls(); + + if (mCamera2Proxy == null) { + mCamera2Proxy = new Camera2Proxy(this, false); + Size previewSize = mCamera2Proxy.configureCamera(); + 
setLayoutAspectRatio(previewSize); + Size videoSize = mCamera2Proxy.getmVideoSize(); + mVideoFrameWidth = videoSize.getWidth(); + mVideoFrameHeight = videoSize.getHeight(); + } + + if (mCamera2Proxy2 == null) { + mCamera2Proxy2 = new Camera2Proxy(this, true); + mCamera2Proxy2.configureCamera(); + Size videoSize = mCamera2Proxy2.getmVideoSize(); + mVideoFrameWidth2 = videoSize.getWidth(); + mVideoFrameHeight2 = videoSize.getHeight(); + } + + mGLView.onResume(); + mGLView.queueEvent(() -> { + mRenderer.setCameraPreviewSize(mCameraPreviewWidth, mCameraPreviewHeight); + mRenderer.setVideoFrameSize(mVideoFrameWidth, mVideoFrameHeight); + }); + + mGLView2.onResume(); + mGLView2.queueEvent(() -> { + mRenderer2.setCameraPreviewSize(mCameraPreviewWidth, mCameraPreviewHeight); + mRenderer2.setVideoFrameSize(mVideoFrameWidth2, mVideoFrameHeight2); + }); + + mImuManager.register(); + mGpsManager.register(); + } + + @Override + protected void onPause() { + Timber.d("onPause -- releasing camera"); + super.onPause(); + // no more frame metadata will be saved during pause + if (mCamera2Proxy != null) { + mCamera2Proxy.releaseCamera(); + mCamera2Proxy = null; + } + + if (mCamera2Proxy2 != null) { + mCamera2Proxy2.releaseCamera(); + mCamera2Proxy2 = null; + } + + mGLView.queueEvent(() -> { + // Tell the renderer that it's about to be paused so it can clean up. + mRenderer.notifyPausing(); + }); + mGLView.onPause(); + + mGLView2.queueEvent(() -> { + // Tell the renderer that it's about to be paused so it can clean up. 
+ mRenderer2.notifyPausing(); + }); + mGLView2.onPause(); + + mImuManager.unregister(); + mGpsManager.unregister(); + Timber.d("onPause complete"); + } + + @Override + protected void onDestroy() { + Timber.d("onDestroy"); + super.onDestroy(); + mCameraHandler.invalidateHandler(); + mCameraHandler2.invalidateHandler(); + } + + + public void clickToggleRecording(@SuppressWarnings("unused") View unused) { + mRecordingEnabled = !mRecordingEnabled; + if (mRecordingEnabled) { + String outputDir = renewOutputDir(); + String outputFile = outputDir + File.separator + "movie.mp4"; + String outputFile2 = outputDir + File.separator + "movie2.mp4"; + String metaFile = outputDir + File.separator + "frame_timestamps.txt"; + String metaFile2 = outputDir + File.separator + "frame_timestamps2.txt"; + + String basename = outputDir.substring(outputDir.lastIndexOf("/") + 1); + mOutputDirText.setText(basename); + mRenderer.resetOutputFiles(outputFile, metaFile); // this will not cause sync issues + mRenderer2.resetOutputFiles(outputFile2, metaFile2); + + String inertialFile = outputDir + File.separator + "gyro_accel.csv"; + String locationFile = outputDir + File.separator + "location.csv"; + String edgeEpochFile = outputDir + File.separator + "edge_epochs.txt"; + + mTimeBaseManager.startRecording(edgeEpochFile, mCamera2Proxy.getmTimeSourceValue()); + mImuManager.startRecording(inertialFile); + mGpsManager.startRecording(locationFile); + mCamera2Proxy.startRecordingCaptureResult( + outputDir + File.separator + "movie_metadata.csv"); + mCamera2Proxy2.startRecordingCaptureResult( + outputDir + File.separator + "movie_metadata2.csv"); + } else { + mCamera2Proxy.stopRecordingCaptureResult(); + mCamera2Proxy2.stopRecordingCaptureResult(); + mImuManager.stopRecording(); + mGpsManager.stopRecording(); + mTimeBaseManager.stopRecording(); + } + + mGLView.queueEvent(() -> { + // notify the renderer that we want to change the encoder's state + mRenderer.changeRecordingState(mRecordingEnabled); + 
}); + + mGLView2.queueEvent(() -> { + // notify the renderer that we want to change the encoder's state + mRenderer2.changeRecordingState(mRecordingEnabled); + }); + + updateControls(); + } + + private void updateControls() { + ImageButton toggleRecordingButton = findViewById(R.id.toggleRecordingButton); + toggleRecordingButton.setContentDescription(mRecordingEnabled + ? getString(R.string.stop) + : getString(R.string.record)); + toggleRecordingButton.setImageResource(mRecordingEnabled + ? R.drawable.ic_baseline_stop_24 + : R.drawable.ic_baseline_fiber_manual_record_24); + + Spinner filterSpinner = findViewById(R.id.cameraFilter_spinner); + filterSpinner.setVisibility(mRecordingEnabled ? View.INVISIBLE : View.VISIBLE); + } + + public void clickShowPopupMenu(View v) { + PopupMenu popup = new PopupMenu(getApplicationContext(), v); + popup.setOnMenuItemClickListener(this); + popup.inflate(R.menu.popup_menu); + popup.show(); + } + + @Override + public boolean onMenuItemClick(MenuItem item) { + if (item.getItemId() == R.id.menu_settings) { + final Intent toSettings = new Intent(this, SettingsActivity.class); + startActivity(toSettings); + } else if (item.getItemId() == R.id.menu_imu) { + final Intent toImuViewer = new Intent(this, ImuViewerActivity.class); + startActivity(toImuViewer); + } else if (item.getItemId() == R.id.menu_about) { + final Intent toAbout = new Intent(this, AboutActivity.class); + startActivity(toAbout); + } + + return false; + } +} + + +class CameraHandler extends Handler { + public static final int MSG_SET_SURFACE_TEXTURE = 0; + public static final int MSG_SET_SURFACE_TEXTURE2 = 2; + public static final int MSG_MANUAL_FOCUS = 1; + + // Weak reference to the Activity; only access this from the UI thread. + private final WeakReference mWeakActivity; + + public CameraHandler(Activity activity) { + mWeakActivity = new WeakReference<>(activity); + } + + /** + * Drop the reference to the activity. 
Useful as a paranoid measure to ensure that + * attempts to access a stale Activity through a handler are caught. + */ + public void invalidateHandler() { + mWeakActivity.clear(); + } + + + @Override // runs on UI thread + public void handleMessage(Message inputMessage) { + int what = inputMessage.what; + Object obj = inputMessage.obj; + + Timber.d("CameraHandler [%s]: what=%d", this.toString(), what); + + Activity activity = mWeakActivity.get(); + if (activity == null) { + Timber.w("CameraHandler.handleMessage: activity is null"); + return; + } + + switch (what) { + case MSG_SET_SURFACE_TEXTURE: + ((CameraActivityBase) activity).handleSetSurfaceTexture( + (SurfaceTexture) inputMessage.obj); + break; + case MSG_SET_SURFACE_TEXTURE2: + ((CameraActivityBase) activity).handleSetSurfaceTexture2( + (SurfaceTexture) inputMessage.obj); + break; + case MSG_MANUAL_FOCUS: + Camera2Proxy camera2proxy = ((CameraActivityBase) activity).getmCamera2Proxy(); + camera2proxy.changeManualFocusPoint((ManualFocusConfig) obj); + break; + default: + throw new RuntimeException("unknown msg " + what); + } + } +} + +class CameraSurfaceRenderer implements GLSurfaceView.Renderer { + private static final boolean VERBOSE = false; + + private static final int RECORDING_OFF = 0; + private static final int RECORDING_ON = 1; + private static final int RECORDING_RESUMED = 2; + + private final CameraHandler mCameraHandler; + private final TextureMovieEncoder mVideoEncoder; + private String mOutputFile; + private String mMetadataFile; + + private FullFrameRect mFullScreen; + + private final float[] mSTMatrix = new float[16]; + private int mTextureId; + + private SurfaceTexture mSurfaceTexture; + private boolean mRecordingEnabled; + private int mRecordingStatus; + private int mFrameCount; + + // width/height of the incoming camera preview frames + private boolean mIncomingSizeUpdated; + private int mIncomingWidth; + private int mIncomingHeight; + + private int mVideoFrameWidth; + private int 
mVideoFrameHeight; + + private int mCurrentFilter; + private int mNewFilter; + + private final int mCameraId; + + public CameraSurfaceRenderer(CameraHandler cameraHandler, + TextureMovieEncoder movieEncoder, int cameraId) { + mCameraHandler = cameraHandler; + mVideoEncoder = movieEncoder; + mTextureId = -1; + + mRecordingStatus = -1; + mRecordingEnabled = false; + mFrameCount = -1; + + mIncomingSizeUpdated = false; + mIncomingWidth = mIncomingHeight = -1; + mVideoFrameWidth = mVideoFrameHeight = -1; + + mCurrentFilter = -1; + mNewFilter = CameraActivity.FILTER_NONE; + + mCameraId = cameraId; + } + + public void resetOutputFiles(String outputFile, String metaFile) { + mOutputFile = outputFile; + mMetadataFile = metaFile; + } + + /** + * Notifies the renderer thread that the activity is pausing. + *

+ * For best results, call this *after* disabling Camera preview. + */ + public void notifyPausing() { + if (mSurfaceTexture != null) { + Timber.d("renderer pausing -- releasing SurfaceTexture"); + mSurfaceTexture.release(); + mSurfaceTexture = null; + } + if (mFullScreen != null) { + mFullScreen.release(false); // assume the GLSurfaceView EGL context is about + mFullScreen = null; // to be destroyed + } + mIncomingWidth = mIncomingHeight = -1; + mVideoFrameWidth = mVideoFrameHeight = -1; + } + + /** + * Notifies the renderer that we want to stop or start recording. + */ + public void changeRecordingState(boolean isRecording) { + Timber.d("changeRecordingState: was %b now %b", mRecordingEnabled, isRecording); + mRecordingEnabled = isRecording; + } + + /** + * Changes the filter that we're applying to the camera preview. + */ + public void changeFilterMode(int filter) { + mNewFilter = filter; + } + + /** + * Updates the filter program. + */ + public void updateFilter() { + Texture2dProgram.ProgramType programType; + float[] kernel = null; + float colorAdj = 0.0f; + + Timber.d("Updating filter to %d", mNewFilter); + switch (mNewFilter) { + case CameraActivity.FILTER_NONE: + programType = Texture2dProgram.ProgramType.TEXTURE_EXT; + break; + case CameraActivity.FILTER_BLACK_WHITE: + programType = Texture2dProgram.ProgramType.TEXTURE_EXT_BW; + break; + case CameraActivity.FILTER_BLUR: + programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT_VIEW; + kernel = new float[]{ + 1f / 16f, 2f / 16f, 1f / 16f, + 2f / 16f, 4f / 16f, 2f / 16f, + 1f / 16f, 2f / 16f, 1f / 16f}; + break; + case CameraActivity.FILTER_SHARPEN: + programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT_VIEW; + kernel = new float[]{ + 0f, -1f, 0f, + -1f, 5f, -1f, + 0f, -1f, 0f}; + break; + case CameraActivity.FILTER_EDGE_DETECT: + programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT_VIEW; + kernel = new float[]{ + -1f, -1f, -1f, + -1f, 8f, -1f, + -1f, -1f, -1f}; + break; + case 
CameraActivity.FILTER_EMBOSS: + programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT_VIEW; + kernel = new float[]{ + 2f, 0f, 0f, + 0f, -1f, 0f, + 0f, 0f, -1f}; + colorAdj = 0.5f; + break; + default: + throw new RuntimeException("Unknown filter mode " + mNewFilter); + } + + // Do we need a whole new program? (We want to avoid doing this if we don't have + // too -- compiling a program could be expensive.) + if (programType != mFullScreen.getProgram().getProgramType()) { + mFullScreen.changeProgram(new Texture2dProgram(programType)); + // If we created a new program, we need to initialize the texture width/height. + mIncomingSizeUpdated = true; + } + + // Update the filter kernel (if any). + if (kernel != null) { + mFullScreen.getProgram().setKernel(kernel, colorAdj); + } + + mCurrentFilter = mNewFilter; + } + + /** + * Records the size of the incoming camera preview frames. + *

+ * It's not clear whether this is guaranteed to execute before or after onSurfaceCreated(), + * so we assume it could go either way. (Fortunately they both run on the same thread, + * so we at least know that they won't execute concurrently.) + */ + public void setCameraPreviewSize(int width, int height) { + Timber.d("setCameraPreviewSize"); + mIncomingWidth = width; + mIncomingHeight = height; + mIncomingSizeUpdated = true; + } + + public void setVideoFrameSize(int width, int height) { + mVideoFrameWidth = width; + mVideoFrameHeight = height; + } + + @Override + public void onSurfaceCreated(GL10 unused, EGLConfig config) { + Timber.d("onSurfaceCreated"); + + // We're starting up or coming back. Either way we've got a new EGLContext that will + // need to be shared with the video encoder, so figure out if a recording is already + // in progress. + mRecordingEnabled = mVideoEncoder.isRecording(); + if (mRecordingEnabled) { + mRecordingStatus = RECORDING_RESUMED; + } else { + mRecordingStatus = RECORDING_OFF; + } + + // Set up the texture glitter that will be used for on-screen display. This + // is *not* applied to the recording, because that uses a separate shader. + mFullScreen = new FullFrameRect( + new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT)); + + mTextureId = mFullScreen.createTextureObject(); + + // Create a SurfaceTexture, with an external texture, in this EGL context. We don't + // have a Looper in this thread -- GLSurfaceView doesn't create one -- so the frame + // available messages will arrive on the main thread. + mSurfaceTexture = new SurfaceTexture(mTextureId); + + // Tell the UI thread to enable the camera preview. 
+ if (mCameraId == 0) { + mCameraHandler.sendMessage(mCameraHandler.obtainMessage( + CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture)); + } else { + mCameraHandler.sendMessage(mCameraHandler.obtainMessage( + CameraHandler.MSG_SET_SURFACE_TEXTURE2, mSurfaceTexture)); + } + } + + @Override + public void onSurfaceChanged(GL10 unused, int width, int height) { + Timber.d("onSurfaceChanged %dx%d", width, height); + } + + @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1) + @Override + public void onDrawFrame(GL10 unused) { + if (VERBOSE) Timber.d("onDrawFrame tex=%d", mTextureId); + boolean showBox; + + // Latch the latest frame. If there isn't anything new, we'll just re-use whatever + // was there before. + mSurfaceTexture.updateTexImage(); + + // If the recording state is changing, take care of it here. Ideally we wouldn't + // be doing all this in onDrawFrame(), but the EGLContext sharing with GLSurfaceView + // makes it hard to do elsewhere. + if (mRecordingEnabled) { + switch (mRecordingStatus) { + case RECORDING_OFF: + if (mVideoFrameWidth <= 0 || mVideoFrameHeight <= 0) { + Timber.i("Start recording before setting video frame size; skipping"); + break; + } + Timber.d("Start recording outputFile: %s", mOutputFile); + // The output video has a size e.g., 720x1280. Video of the same size is recorded in + // the portrait mode of the complex CameraRecorder-android at + // https://github.com/MasayukiSuda/CameraRecorder-android. 
+ mVideoEncoder.startRecording( + new TextureMovieEncoder.EncoderConfig( + mOutputFile, mVideoFrameHeight, mVideoFrameWidth, + CameraUtils.calcBitRate(mVideoFrameWidth, mVideoFrameHeight, + VideoEncoderCore.FRAME_RATE), + EGL14.eglGetCurrentContext(), + mFullScreen.getProgram(), mMetadataFile)); + mRecordingStatus = RECORDING_ON; + break; + case RECORDING_RESUMED: + Timber.d("Resume recording"); + mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext()); + mRecordingStatus = RECORDING_ON; + break; + case RECORDING_ON: + break; + default: + throw new RuntimeException("unknown status " + mRecordingStatus); + } + } else { + switch (mRecordingStatus) { + case RECORDING_ON: + case RECORDING_RESUMED: + Timber.d("Stop recording"); + mVideoEncoder.stopRecording(); + mRecordingStatus = RECORDING_OFF; + break; + case RECORDING_OFF: + break; + default: + throw new RuntimeException("unknown status " + mRecordingStatus); + } + } + + // Set the video encoder's texture name. We only need to do this once, but in the + // current implementation it has to happen after the video encoder is started, so + // we just do it here. + mVideoEncoder.setTextureId(mTextureId); + + // Tell the video encoder thread that a new frame is available. + // This will be ignored if we're not actually recording. + mVideoEncoder.frameAvailable(mSurfaceTexture); + + if (mIncomingWidth <= 0 || mIncomingHeight <= 0) { + // Texture size isn't set yet. This is only used for the filters, but to be + // safe we can just skip drawing while we wait for the various races to resolve. + // (This seems to happen if you toggle the screen off/on with power button.) + Timber.i("Drawing before incoming texture size set; skipping"); + return; + } + + // Update the filter, if necessary. + if (mCurrentFilter != mNewFilter) { + updateFilter(); + } + + if (mIncomingSizeUpdated) { + mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight); + mIncomingSizeUpdated = false; + } + + // Draw the video frame. 
+ mSurfaceTexture.getTransformMatrix(mSTMatrix); + mFullScreen.drawFrame(mTextureId, mSTMatrix); + + // Draw a flashing box if we're recording. This only appears on screen. + showBox = (mRecordingStatus == RECORDING_ON); + if (showBox && (++mFrameCount & 0x04) == 0) { + drawBox(); + } + } + + /** + * Draws a red box in the corner. + */ + private void drawBox() { + GLES20.glEnable(GLES20.GL_SCISSOR_TEST); + GLES20.glScissor(0, 0, 50, 50); + GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f); + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + GLES20.glDisable(GLES20.GL_SCISSOR_TEST); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/CameraUtils.java b/app/src/main/java/io/a3dv/VIRec/CameraUtils.java new file mode 100644 index 0000000..fdd9b36 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/CameraUtils.java @@ -0,0 +1,88 @@ +package io.a3dv.VIRec; + +import android.util.Size; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; + +import timber.log.Timber; + +/** + * Camera-related utility functions. + */ +public class CameraUtils { + private static final float BPP = 0.25f; + + /** + * In this sample, we choose a video size with 3x4 aspect ratio. Also, we don't use sizes + * larger than 1080p, since MediaRecorder cannot handle such a high-resolution video. + * + * @param choices The list of available sizes + * @return The video size + */ + public static Size chooseVideoSize( + Size[] choices, int wScale, int hScale, int maxWidth) { + for (Size size : choices) { + if (size.getWidth() == size.getHeight() * wScale / hScale && + size.getWidth() <= maxWidth) { + return size; + } + } + Timber.e("Couldn't find any suitable video size"); + return choices[choices.length - 1]; + } + + /** + * Compares two {@code Size}s based on their areas. 
+ */ + static class CompareSizesByArea implements Comparator { + + @Override + public int compare(Size lhs, Size rhs) { + // We cast here to ensure the multiplications won't overflow + return Long.signum((long) lhs.getWidth() * lhs.getHeight() - + (long) rhs.getWidth() * rhs.getHeight()); + } + + } + + /** + * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose + * width and height are at least as large as the respective requested values, and whose aspect + * ratio matches with the specified value. + * + * @param choices The list of sizes that the camera supports for the intended output class + * @param width The minimum desired width + * @param height The minimum desired height + * @param aspectRatio The aspect ratio + * @return The optimal {@code Size}, or an arbitrary one if none were big enough + */ + public static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) { + // Collect the supported resolutions that are at least as big as the preview Surface + List bigEnough = new ArrayList<>(); + int w = aspectRatio.getWidth(); + int h = aspectRatio.getHeight(); + for (Size option : choices) { + if (option.getHeight() == option.getWidth() * h / w && + option.getWidth() >= width && option.getHeight() >= height) { + bigEnough.add(option); + } + } + + // Pick the smallest of those, assuming we found any + if (bigEnough.size() > 0) { + return Collections.min(bigEnough, new CompareSizesByArea()); + } else { + Timber.e("Couldn't find any suitable preview size"); + return choices[0]; + } + } + + public static int calcBitRate(int width, int height, int frame_rate) { + final int bitrate = (int) (BPP * frame_rate * width * height); + Timber.i("bitrate=%d", bitrate); + return bitrate; + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/FileHelper.java b/app/src/main/java/io/a3dv/VIRec/FileHelper.java new file mode 100644 index 0000000..edb8188 --- /dev/null +++ 
b/app/src/main/java/io/a3dv/VIRec/FileHelper.java @@ -0,0 +1,52 @@ +package io.a3dv.VIRec; + +import android.os.Build; +import android.text.Html; +import android.text.SpannableString; +import android.text.Spanned; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; + +import timber.log.Timber; + +public class FileHelper { + public static BufferedWriter createBufferedWriter(String filename) { + File dest = new File(filename); + try { + if (!dest.exists()) + dest.createNewFile(); + return new BufferedWriter(new FileWriter(dest, true)); + } catch (IOException ioe) { + Timber.e(ioe); + } + return null; + } + + public static void closeBufferedWriter(BufferedWriter writer) { + try { + writer.flush(); + writer.close(); + } catch (IOException ioe) { + Timber.e(ioe); + } + } + + // https://stackoverflow.com/questions/37904739/html-fromhtml-deprecated-in-android-n + @SuppressWarnings("deprecation") + public static Spanned fromHtml(String html) { + if (html == null) { + // return an empty spannable if the html is null + return new SpannableString(""); + } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + // FROM_HTML_MODE_LEGACY is the behaviour that was used for versions below android N + // we are using this flag to give a consistent behaviour + return Html.fromHtml(html, Html.FROM_HTML_MODE_LEGACY); + } else { + return Html.fromHtml(html); + } + } + +} diff --git a/app/src/main/java/io/a3dv/VIRec/FocalLengthHelper.java b/app/src/main/java/io/a3dv/VIRec/FocalLengthHelper.java new file mode 100644 index 0000000..e9c5d1a --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/FocalLengthHelper.java @@ -0,0 +1,130 @@ +package io.a3dv.VIRec; + +// estimate focal length, i.e., imaging distance in pixels, using all sorts of info + +import android.annotation.TargetApi; +import android.graphics.Rect; +import android.hardware.camera2.CameraCharacteristics; +import android.os.Build; +import android.util.Size; +import 
android.util.SizeF; + +import timber.log.Timber; + +public class FocalLengthHelper { + + private static final String TAG = "FocalLengthHelper"; + + private float[] mIntrinsic; + private Float mFocalLength; + private Float mFocusDistance; + private SizeF mPhysicalSize; + private Size mPixelArraySize; + private Rect mPreCorrectionSize; // This rectangle is defined relative to full pixel array; (0,0) is the top-left of the full pixel array + private Rect mActiveSize; // This rectangle is defined relative to the full pixel array; (0,0) is the top-left of the full pixel array, + private Rect mCropRegion; // Its The coordinate system is defined relative to the active array rectangle given in this field, with (0, 0) being the top-left of this rectangle. + private Size mImageSize; + + public FocalLengthHelper() { + + } + + public void setLensParams(CameraCharacteristics result) { + setLensParams21(result); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + setLensParams23(result); + } + } + + public void setmCropRegion(Rect mCropRegion) { + this.mCropRegion = mCropRegion; + } + + public void setmFocalLength(Float mFocalLength) { + this.mFocalLength = mFocalLength; + } + + public void setmFocusDistance(Float mFocusDistance) { + this.mFocusDistance = mFocusDistance; + } + + public void setmImageSize(Size mImageSize) { + this.mImageSize = mImageSize; + } + + /** + * compute the focal length in pixels. + * First it tries to use values read from LENS_INTRINSIC_CALIBRATION, if not available, + * it will compute focal length based on an empirical model. + * + * focus distance is the inverse of the distance between the lens and the subject, + * assuming LENS_INFO_FOCUS_DISTANCE_CALIBRATION is APPROXIMATE or CALIBRATED. + * see https://stackoverflow.com/questions/60394282/unit-of-camera2-lens-focus-distance + * i is the distance between the imaging sensor and the lens. + * Recall 1/focal_length = focus_distance + 1/i. 
+ * Because focal_length is very small say 3 mm, + * focus_distance is often comparatively small, say 5 1/meter, + * i is often very close to the physical focal length, say 3 mm. + * + * see: https://source.android.com/devices/camera/camera3_crop_reprocess.html + * https://stackoverflow.com/questions/39965408/what-is-the-android-camera2-api-equivalent-of-camera-parameters-gethorizontalvie + * + * @return (focal length along x, focal length along y) in pixels + */ + public SizeF getFocalLengthPixel() { + if (mIntrinsic != null && mIntrinsic[0] > 1.0) { + Timber.d("Focal length set as (%f, %f)",mIntrinsic[0], mIntrinsic[1]); + return new SizeF(mIntrinsic[0], mIntrinsic[1]); + } + + if (mFocalLength != null) { + float imageDistance; // mm + if (mFocusDistance == null || mFocusDistance == 0.f) { + imageDistance = mFocalLength; + } else { + imageDistance = 1000.f / (1000.f / mFocalLength - mFocusDistance); + } + // ignore the effect of distortion on the active array coordinates + float crop_aspect = (float) mCropRegion.width() / + ((float) mCropRegion.height()); + float image_aspect = (float) mImageSize.getWidth() / + ((float) mImageSize.getHeight()); + float f_image_pixel; + if (image_aspect >= crop_aspect) { + float scale = (float) mImageSize.getWidth() / ((float) mCropRegion.width()); + f_image_pixel = scale * imageDistance * mPixelArraySize.getWidth() / + mPhysicalSize.getWidth(); + } else { + float scale = (float) mImageSize.getHeight() / ((float) mCropRegion.height()); + f_image_pixel = scale * imageDistance * mPixelArraySize.getHeight() / + mPhysicalSize.getHeight(); + } + return new SizeF(f_image_pixel, f_image_pixel); + } + return new SizeF(1.0f, 1.0f); + } + + @TargetApi(23) + private void setLensParams23(CameraCharacteristics result) { + mIntrinsic = result.get(CameraCharacteristics.LENS_INTRINSIC_CALIBRATION); + if (mIntrinsic != null) + Timber.d("char lens intrinsics fx %f fy %f cx %f cy %f s %f", + mIntrinsic[0], mIntrinsic[1], mIntrinsic[2], 
mIntrinsic[3], mIntrinsic[4]); + mPreCorrectionSize = + result.get(CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); + if (mPreCorrectionSize != null) + Timber.d("Precorrection rect %s", mPreCorrectionSize.toString()); + } + + private void setLensParams21(CameraCharacteristics result) { + mPhysicalSize = result.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE); + if (mPhysicalSize != null) + Timber.d("Physical size %s", mPhysicalSize.toString()); + mPixelArraySize = result.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE); + if (mPixelArraySize != null) + Timber.d("Pixel array size %s", mPixelArraySize.toString()); + mActiveSize = result.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + if (mActiveSize != null) + Timber.d("Active rect %s", mActiveSize.toString()); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/GPSManager.java b/app/src/main/java/io/a3dv/VIRec/GPSManager.java new file mode 100644 index 0000000..ae451f6 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/GPSManager.java @@ -0,0 +1,171 @@ +package io.a3dv.VIRec; + +import android.annotation.SuppressLint; +import android.app.Activity; +import android.content.Context; +import android.location.Location; +import android.location.LocationListener; +import android.location.LocationManager; +import android.os.Bundle; +import android.text.format.DateFormat; +import android.widget.TextView; + +import org.jetbrains.annotations.NotNull; + +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Date; + +import timber.log.Timber; + +public class GPSManager implements LocationListener { + public static String GpsHeader = "Timestamp[nanosecond],latitude[degrees],longitude[degrees],altitude[meters],speed[meters/second],Unix time[nanosecond]\n"; + + private final Activity activity; + + private TextView mGpsStatusText; + + private static class LocationPacket { + long timestamp; // nanoseconds + long unixTime; // milliseconds + 
double latitude; + double longitude; + double altitude; + float speed; + + LocationPacket(long time, long unixTimeMillis, double lat, double lng, double alt, float spd) { + timestamp = time; + unixTime = unixTimeMillis; + latitude = lat; + longitude = lng; + altitude = alt; + speed = spd; + } + + @Override + public @NotNull String toString() { + String delimiter = ","; + return timestamp + + delimiter + latitude + + delimiter + longitude + + delimiter + altitude + + delimiter + speed + + delimiter + unixTime + "000000"; + } + } + + private final LocationManager mGpsManager; + + private volatile boolean mRecordingLocationData = false; + private BufferedWriter mDataWriter = null; + +// private final Deque mLocationData = new ArrayDeque<>(); + + public GPSManager(Activity activity) { + this.activity = activity; + + mGpsManager = (LocationManager) activity.getSystemService(Context.LOCATION_SERVICE); + + mGpsStatusText = (TextView) activity.findViewById(R.id.gps_status); + if (mGpsManager.isProviderEnabled(LocationManager.GPS_PROVIDER)) { + mGpsStatusText.setText(R.string.gpsLooking); + } else { + mGpsStatusText.setText(R.string.gpsStatusDisabled); + } + } + + @SuppressLint("MissingPermission") + public void startRecording(String captureResultFile) { + mGpsManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this); + + try { + mDataWriter = new BufferedWriter(new FileWriter(captureResultFile, false)); + mDataWriter.write(GpsHeader); + mRecordingLocationData = true; + } catch (IOException err) { + Timber.e(err, "IOException in opening location data writer at %s", + captureResultFile); + } + } + + public void stopRecording() { + if (mRecordingLocationData) { + mRecordingLocationData = false; + try { + mDataWriter.flush(); + mDataWriter.close(); + } catch (IOException err) { + Timber.e(err, "IOException in closing location data writer"); + } + mDataWriter = null; + } + } + + @Deprecated + public void onStatusChanged(String provider, int status, Bundle extras) 
{ + Timber.d("GPS status changed | provider: %s, status: %i", provider, status); + } + + @Override + public void onProviderEnabled(String provider) { + Timber.d("GPS provider enabled | provider: %s", provider); + + mGpsStatusText = (TextView) activity.findViewById(R.id.gps_status); + mGpsStatusText.setText(R.string.gpsLooking); + } + + @Override + public void onProviderDisabled(String provider) { + Timber.d("GPS provider disabled | provider: %s", provider); + + mGpsStatusText = (TextView) activity.findViewById(R.id.gps_status); + mGpsStatusText.setText(R.string.gpsStatusDisabled); + } + + @SuppressLint("SetTextI18n") + @Override + public final void onLocationChanged(Location location) { + long unixTime = System.currentTimeMillis(); + + double latitude = location.getLatitude(); + double longitude = location.getLongitude(); + double altitude = location.getAltitude(); + float speed = location.getSpeed(); + + LocationPacket lp = new LocationPacket( + location.getElapsedRealtimeNanos(), + unixTime, + latitude, + longitude, + altitude, + speed + ); + +// mLocationData.add(lp); + + if (mRecordingLocationData) { + try { + mDataWriter.write(lp.toString() + "\n"); + } catch (IOException ioe) { + Timber.e(ioe); + } + } + + Date date = new Date(location.getTime()); + + mGpsStatusText = (TextView) activity.findViewById(R.id.gps_status); + mGpsStatusText.setText("Latitude: " + latitude + "\nLongitude: " + longitude + + "\nTime: " + DateFormat.format("yyyy-MM-dd HH:mm:ss", date) + + "\nAccuracy: " + location.getAccuracy() + "m"); + } + + @SuppressLint("MissingPermission") + public void register() { + mGpsManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this); + } + + public void unregister() { + mGpsManager.removeUpdates(this); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/IMUManager.java b/app/src/main/java/io/a3dv/VIRec/IMUManager.java new file mode 100644 index 0000000..90b846f --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/IMUManager.java @@ -0,0 
package io.a3dv.VIRec;

import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Process;

import androidx.annotation.NonNull;
import androidx.preference.PreferenceManager;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;

import timber.log.Timber;

/**
 * Logs gyroscope samples, each paired with accelerometer values interpolated
 * to the gyro timestamp, as CSV rows.
 */
public class IMUManager implements SensorEventListener {
    public static String ImuHeader = "Timestamp[nanosec],gx[rad/s],gy[rad/s],gz[rad/s]," +
            "ax[m/s^2],ay[m/s^2],az[m/s^2],Unix time[nanosec]\n";

    /** One sensor sample plus the wall-clock time at which it was received. */
    private static class SensorPacket {
        long timestamp; // nanoseconds (sensor event clock)
        long unixTime; // milliseconds
        float[] values;

        SensorPacket(long time, long unixTimeMillis, float[] vals) {
            timestamp = time;
            unixTime = unixTimeMillis;
            values = vals;
        }

        /** CSV row; unix time is padded from milliseconds to nanoseconds. */
        @NonNull
        @Override
        public String toString() {
            String delimiter = ",";
            StringBuilder sb = new StringBuilder();
            sb.append(timestamp);
            for (float value : values) {
                sb.append(delimiter).append(value);
            }
            sb.append(delimiter).append(unixTime).append("000000");
            return sb.toString();
        }
    }

    // Sensor listeners
    private final SensorManager mSensorManager;
    private final Sensor mAccel;
    private final Sensor mGyro;
    // Was a mutable static assigned in the constructor; instance state suffices.
    private final SharedPreferences mSharedPreferences;

    private volatile boolean mRecordingInertialData = false;
    private BufferedWriter mDataWriter = null;
    private HandlerThread mSensorThread;

    // Generic element types restored — the raw-typed declarations did not compile.
    private final Deque<SensorPacket> mGyroData = new ArrayDeque<>();
    private final Deque<SensorPacket> mAccelData = new ArrayDeque<>();

    public IMUManager(Activity activity) {
        mSensorManager = (SensorManager) activity.getSystemService(Context.SENSOR_SERVICE);
        mAccel = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        mGyro = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity);
    }

    /** Opens the CSV output (truncating) and starts logging synced samples. */
    public void startRecording(String captureResultFile) {
        try {
            mDataWriter = new BufferedWriter(
                    new FileWriter(captureResultFile, false));
            if (mGyro == null || mAccel == null) {
                String warning = "The device may not have a gyroscope or an accelerometer!\n" +
                        "No IMU data will be logged.\n" +
                        "Has Gyroscope? " + (mGyro == null ? "No" : "Yes") + "\n" +
                        "Has Accelerometer? " + (mAccel == null ? "No" : "Yes") + "\n";
                mDataWriter.write(warning);
            } else {
                mDataWriter.write(ImuHeader);
            }
            mRecordingInertialData = true;
        } catch (IOException err) {
            Timber.e(err, "IOException in opening inertial data writer at %s",
                    captureResultFile);
        }
    }

    /** Stops logging and closes the CSV file; safe to call when not recording. */
    public void stopRecording() {
        if (mRecordingInertialData) {
            mRecordingInertialData = false;
            try {
                mDataWriter.flush();
                mDataWriter.close();
            } catch (IOException err) {
                Timber.e(err, "IOException in closing inertial data writer");
            }
            mDataWriter = null;
        }
    }

    @Override
    public final void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

    // Sync inertial data by interpolating linear acceleration for each gyro datum.
    // Because the sensor events are delivered to the handler thread in order,
    // no synchronization is needed here.
    private SensorPacket syncInertialData() {
        if (!mGyroData.isEmpty() && mAccelData.size() >= 2) {
            SensorPacket oldestGyro = mGyroData.peekFirst();
            SensorPacket oldestAccel = mAccelData.peekFirst();
            SensorPacket latestAccel = mAccelData.peekLast();

            assert oldestGyro != null;
            assert oldestAccel != null;
            if (oldestGyro.timestamp < oldestAccel.timestamp) {
                // Gyro sample predates all accel samples; cannot interpolate.
                Timber.w("throwing one gyro data");
                mGyroData.removeFirst();
            } else {
                assert latestAccel != null;
                if (oldestGyro.timestamp > latestAccel.timestamp) {
                    // Gyro sample is newer than all accel samples; keep only the
                    // latest accel as the future left bracket.
                    Timber.w("throwing #accel data %d", mAccelData.size() - 1);
                    mAccelData.clear();
                    mAccelData.add(latestAccel);
                } else { // linearly interpolate the accel data at the gyro timestamp
                    float[] gyro_accel = new float[6];
                    SensorPacket sp = new SensorPacket(oldestGyro.timestamp, oldestGyro.unixTime, gyro_accel);
                    gyro_accel[0] = oldestGyro.values[0];
                    gyro_accel[1] = oldestGyro.values[1];
                    gyro_accel[2] = oldestGyro.values[2];

                    // Bracket the gyro timestamp with accel samples.
                    SensorPacket leftAccel = null;
                    SensorPacket rightAccel = null;
                    for (SensorPacket packet : mAccelData) {
                        if (packet.timestamp <= oldestGyro.timestamp) {
                            leftAccel = packet;
                        } else {
                            rightAccel = packet;
                            break;
                        }
                    }

                    // If an accelerometer sample falls within [t-x, t+x] of the gyro
                    // sample at t, use it directly instead of interpolating.
                    long mInterpolationTimeResolution = 500; // nanoseconds
                    assert leftAccel != null;
                    if (oldestGyro.timestamp - leftAccel.timestamp <=
                            mInterpolationTimeResolution) {
                        gyro_accel[3] = leftAccel.values[0];
                        gyro_accel[4] = leftAccel.values[1];
                        gyro_accel[5] = leftAccel.values[2];
                    } else {
                        assert rightAccel != null;
                        if (rightAccel.timestamp - oldestGyro.timestamp <=
                                mInterpolationTimeResolution) {
                            gyro_accel[3] = rightAccel.values[0];
                            gyro_accel[4] = rightAccel.values[1];
                            gyro_accel[5] = rightAccel.values[2];
                        } else {
                            float tmp1 = oldestGyro.timestamp - leftAccel.timestamp;
                            float tmp2 = rightAccel.timestamp - leftAccel.timestamp;
                            float ratio = tmp1 / tmp2;
                            gyro_accel[3] = leftAccel.values[0] +
                                    (rightAccel.values[0] - leftAccel.values[0]) * ratio;
                            gyro_accel[4] = leftAccel.values[1] +
                                    (rightAccel.values[1] - leftAccel.values[1]) * ratio;
                            gyro_accel[5] = leftAccel.values[2] +
                                    (rightAccel.values[2] - leftAccel.values[2]) * ratio;
                        }
                    }

                    mGyroData.removeFirst();
                    // Drop accel samples older than the left bracket; they can no
                    // longer bracket any future gyro sample.
                    for (Iterator<SensorPacket> iterator = mAccelData.iterator();
                         iterator.hasNext(); ) {
                        SensorPacket packet = iterator.next();
                        if (packet.timestamp < leftAccel.timestamp) {
                            iterator.remove();
                        } else {
                            break;
                        }
                    }
                    return sp;
                }
            }
        }
        return null;
    }

    @Override
    public final void onSensorChanged(SensorEvent event) {
        long unixTime = System.currentTimeMillis();
        // Fix: Android reuses the SensorEvent values array across callbacks, so
        // queued packets must own a copy — storing event.values directly let
        // later events silently overwrite data still sitting in the deques.
        if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
            SensorPacket sp = new SensorPacket(event.timestamp, unixTime, event.values.clone());
            mAccelData.add(sp);
        } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
            SensorPacket sp = new SensorPacket(event.timestamp, unixTime, event.values.clone());
            mGyroData.add(sp);
            SensorPacket syncedData = syncInertialData();
            if (syncedData != null && mRecordingInertialData) {
                try {
                    mDataWriter.write(syncedData.toString() + "\n");
                } catch (IOException ioe) {
                    Timber.e(ioe);
                }
            }
        }
    }

    /**
     * This will register all IMU listeners.
     */
    public void register() {
        mSensorThread = new HandlerThread("Sensor thread",
                Process.THREAD_PRIORITY_MORE_FAVORABLE);
        mSensorThread.start();
        String imuFreq = mSharedPreferences.getString("prefImuFreq", "1");
        int mSensorRate = Integer.parseInt(imuFreq);
        // Blocks until looper is prepared, which is fairly quick.
        Handler sensorHandler = new Handler(mSensorThread.getLooper());
        mSensorManager.registerListener(this, mAccel, mSensorRate, sensorHandler);
        mSensorManager.registerListener(this, mGyro, mSensorRate, sensorHandler);
    }

    /**
     * This will unregister all IMU listeners.
     */
    public void unregister() {
        mSensorManager.unregisterListener(this, mAccel);
        mSensorManager.unregisterListener(this, mGyro);
        mSensorManager.unregisterListener(this);
        mSensorThread.quitSafely();
        stopRecording();
    }
}
b/app/src/main/java/io/a3dv/VIRec/ImuRecyclerViewAdapter.java @@ -0,0 +1,84 @@ +package io.a3dv.VIRec; + +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.recyclerview.widget.RecyclerView; + +import java.util.List; +import java.util.Locale; + +import io.a3dv.VIRec.ImuViewContent.SingleAxis; +import io.a3dv.VIRec.ImuViewFragment.OnListFragmentInteractionListener; + +public class ImuRecyclerViewAdapter extends RecyclerView.Adapter { + private final List mValues; + private final OnListFragmentInteractionListener mListener; + + public ImuRecyclerViewAdapter(List items, OnListFragmentInteractionListener listener) { + mValues = items; + mListener = listener; + } + + @NonNull + @Override + public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View itemView = LayoutInflater.from(parent.getContext()).inflate( + R.layout.imu_fragment, parent, false); + return new ViewHolder(itemView); + } + + @Override + public void onBindViewHolder(final ViewHolder holder, int position) { + holder.mItem = mValues.get(position); + holder.mIdView.setText(holder.mItem.id); + holder.mContentView.setText(holder.mItem.content); + holder.mUnitView.setText(FileHelper.fromHtml(holder.mItem.unit)); + holder.mView.setOnClickListener(v -> { + if (null != mListener) { + // Notify the active callbacks interface (the activity, if the + // fragment is attached to one) that an item has been selected. 
+ mListener.onListFragmentInteraction(holder.mItem); + } + }); + } + + @Override + public int getItemViewType(int position) { + return R.layout.imu_fragment; + } + + @Override + public int getItemCount() { + return mValues.size(); + } + + public void updateListItem(int position, float value) { + mValues.get(position).content = String.format(Locale.US, "%.3f", value); + } + + public static class ViewHolder extends RecyclerView.ViewHolder { + public final View mView; + public final TextView mIdView; + public final TextView mContentView; + public final TextView mUnitView; + public SingleAxis mItem; + + public ViewHolder(View view) { + super(view); + mView = view; + mIdView = view.findViewById(R.id.item_number); + mContentView = view.findViewById(R.id.content); + mUnitView = view.findViewById(R.id.unit); + } + + @NonNull + @Override + public String toString() { + return super.toString() + " '" + mContentView.getText() + "'"; + } + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/ImuViewContent.java b/app/src/main/java/io/a3dv/VIRec/ImuViewContent.java new file mode 100644 index 0000000..521bbd8 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/ImuViewContent.java @@ -0,0 +1,67 @@ +package io.a3dv.VIRec; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Helper class for providing content for ImuViewFragment. + * + */ +public class ImuViewContent { + + public static final List ITEMS = new ArrayList(); + + /** + * A map of items, by ID. 
+ */ + public static final Map ITEM_MAP = new HashMap(); + + static { + addItem(new SingleAxis("Accel X", String.valueOf(0.0), + "X axis of accelerometer", "m/s2")); + addItem(new SingleAxis( "Accel Y", String.valueOf(0.0), + "Y axis of accelerometer", "m/s2")); + addItem(new SingleAxis( "Accel Z", String.valueOf(0.0), + "Z axis of accelerometer", "m/s2")); + + addItem(new SingleAxis( "Gyro X", String.valueOf(0.0), + "X axis of gyroscope", "rad/s")); + addItem(new SingleAxis( "Gyro Y", String.valueOf(0.0), + "Y axis of gyroscope", "rad/s")); + addItem(new SingleAxis( "Gyro Z", String.valueOf(0.0), + "Z axis of gyroscope", "rad/s")); + + addItem(new SingleAxis("Mag X", String.valueOf(0.0), + "X axis of magnetometer", "&mu T")); + addItem(new SingleAxis( "Mag Y", String.valueOf(0.0), + "Y axis of magnetometer", "&mu T")); + addItem(new SingleAxis("Mag Z", String.valueOf(0.0), + "Z axis of magnetometer", "&mu T")); + } + + private static void addItem(SingleAxis item) { + ITEMS.add(item); + ITEM_MAP.put(item.id, item); + } + + public static class SingleAxis { + public final String id; + public String content; + public final String details; + public final String unit; + + public SingleAxis(String id, String content, String details, String unit) { + this.id = id; + this.content = content; + this.details = details; + this.unit = unit; + } + + @Override + public String toString() { + return content; + } + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/ImuViewFragment.java b/app/src/main/java/io/a3dv/VIRec/ImuViewFragment.java new file mode 100644 index 0000000..1c486f6 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/ImuViewFragment.java @@ -0,0 +1,176 @@ +package io.a3dv.VIRec; + +import android.content.Context; +import android.hardware.Sensor; +import android.hardware.SensorEvent; +import android.hardware.SensorEventListener; +import android.hardware.SensorManager; +import android.os.Bundle; +import android.os.Handler; +import android.os.HandlerThread; +import 
android.os.Process; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; + +import androidx.annotation.NonNull; +import androidx.fragment.app.Fragment; +import androidx.recyclerview.widget.GridLayoutManager; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + +import io.a3dv.VIRec.ImuViewContent.SingleAxis; + + +public class ImuViewFragment extends Fragment implements SensorEventListener { + private static final String ARG_COLUMN_COUNT = "column-count"; + private int mColumnCount = 1; + private OnListFragmentInteractionListener mListener; + ImuRecyclerViewAdapter mAdapter; + private SensorManager mSensorManager; + private Sensor mAccel; + private Sensor mGyro; + private Sensor mMag; + private static class SensorPacket { + long timestamp; + float[] values; + + SensorPacket(long time, float[] vals) { + timestamp = time; + values = vals; + } + } + private HandlerThread mSensorThread; + + + public ImuViewFragment() { + } + + @SuppressWarnings("unused") + public static ImuViewFragment newInstance(int columnCount) { + ImuViewFragment fragment = new ImuViewFragment(); + Bundle args = new Bundle(); + args.putInt(ARG_COLUMN_COUNT, columnCount); + fragment.setArguments(args); + return fragment; + } + + @Override + public void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + + if (getArguments() != null) { + mColumnCount = getArguments().getInt(ARG_COLUMN_COUNT); + } + + mSensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE); + mAccel = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); // warn: mAccel can be null. + mGyro = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE); // warn: mGyro can be null. 
+ mMag = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); + } + + @Override + public View onCreateView(LayoutInflater inflater, ViewGroup container, + Bundle savedInstanceState) { + View view = inflater.inflate(R.layout.imu_list_fragment, container, false); + if (view instanceof RecyclerView) { + Context context = view.getContext(); + RecyclerView recyclerView = (RecyclerView) view; + if (mColumnCount <= 1) { + recyclerView.setLayoutManager(new LinearLayoutManager(context)); + } else { + recyclerView.setLayoutManager(new GridLayoutManager(context, mColumnCount)); + } + mAdapter = new ImuRecyclerViewAdapter(ImuViewContent.ITEMS, mListener); + recyclerView.setAdapter(mAdapter); + } + return view; + } + + @Override + public void onAttach(@NonNull Context context) { + super.onAttach(context); + if (context instanceof OnListFragmentInteractionListener) { + mListener = (OnListFragmentInteractionListener) context; + } else { + throw new RuntimeException(context.toString() + + " must implement OnListFragmentInteractionListener"); + } + } + + @Override + public void onDetach() { + super.onDetach(); + mListener = null; + } + + @Override + public void onResume() { + super.onResume(); + registerImu(); + } + + @Override + public void onPause() { + super.onPause(); + unregisterImu(); + } + + + public interface OnListFragmentInteractionListener { + void onListFragmentInteraction(SingleAxis item); + } + + @Override + public final void onAccuracyChanged(Sensor sensor, int accuracy) { + } + + @Override + public final void onSensorChanged(SensorEvent event) { + if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { + SensorPacket sp = new SensorPacket(event.timestamp, event.values); + for (int i = 0; i < 3; ++i) { + mAdapter.updateListItem(i, sp.values[i]); + } + } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) { + SensorPacket sp = new SensorPacket(event.timestamp, event.values); + for (int i = 0; i < 3; ++i) { + mAdapter.updateListItem(i + 3, 
sp.values[i]); + } + } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) { + SensorPacket sp = new SensorPacket(event.timestamp, event.values); + for (int i = 0; i < 3; ++i) { + mAdapter.updateListItem(i + 6, sp.values[i]); + } + } + + getActivity().runOnUiThread(() -> mAdapter.notifyDataSetChanged()); + } + + /** + * This will register all IMU listeners + */ + public void registerImu() { + mSensorThread = new HandlerThread("Sensor thread", + Process.THREAD_PRIORITY_MORE_FAVORABLE); + mSensorThread.start(); + Handler sensorHandler = new Handler(mSensorThread.getLooper()); + + int mSensorRate = SensorManager.SENSOR_DELAY_UI; + + mSensorManager.registerListener(this, mAccel, mSensorRate, sensorHandler); + mSensorManager.registerListener(this, mGyro, mSensorRate, sensorHandler); + mSensorManager.registerListener(this, mMag, mSensorRate, sensorHandler); + } + + /** + * This will unregister all IMU listeners + */ + public void unregisterImu() { + mSensorManager.unregisterListener(this, mAccel); + mSensorManager.unregisterListener(this, mGyro); + mSensorManager.unregisterListener(this, mMag); + mSensorManager.unregisterListener(this); + mSensorThread.quitSafely(); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/ImuViewerActivity.java b/app/src/main/java/io/a3dv/VIRec/ImuViewerActivity.java new file mode 100644 index 0000000..a2fc206 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/ImuViewerActivity.java @@ -0,0 +1,25 @@ +package io.a3dv.VIRec; + +import android.os.Bundle; + +import androidx.appcompat.app.AppCompatActivity; + +public class ImuViewerActivity extends AppCompatActivity implements ImuViewFragment.OnListFragmentInteractionListener { + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.menu_intent_activity); + if (savedInstanceState == null) { + getSupportFragmentManager() + .beginTransaction() + .replace(R.id.menu_intent, new ImuViewFragment()) + .commit(); + } 
+ } + + @Override + public void onListFragmentInteraction(ImuViewContent.SingleAxis item) { + + } +} \ No newline at end of file diff --git a/app/src/main/java/io/a3dv/VIRec/MainActivity.java b/app/src/main/java/io/a3dv/VIRec/MainActivity.java new file mode 100644 index 0000000..242582c --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/MainActivity.java @@ -0,0 +1,84 @@ +package io.a3dv.VIRec; + +import android.Manifest; +import android.app.Activity; +import android.content.Intent; +import android.content.pm.PackageManager; +import android.os.Bundle; +import android.widget.Toast; + +import androidx.annotation.NonNull; +import androidx.core.app.ActivityCompat; +import androidx.core.content.ContextCompat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import timber.log.Timber; + +public class MainActivity extends Activity { + + private static final String[] REQUIRED_SDK_PERMISSIONS = new String[] { + Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.ACCESS_FINE_LOCATION }; + + private final static int REQUEST_CODE_ASK_PERMISSIONS = 5947; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.main_activity); + + if (BuildConfig.DEBUG) { + Timber.plant(new Timber.DebugTree()); + } + + checkPermissions(); + } + + /** + * Checks the dynamically-controlled permissions and requests missing permissions from end user. 
+ * see https://developer.here.com/documentation/android-starter/dev_guide/topics/request-android-permissions.html + */ + protected void checkPermissions() { + final List missingPermissions = new ArrayList<>(); + // check all required dynamic permissions + for (final String permission : REQUIRED_SDK_PERMISSIONS) { + final int result = ContextCompat.checkSelfPermission(this, permission); + if (result != PackageManager.PERMISSION_GRANTED) { + missingPermissions.add(permission); + } + } + if (!missingPermissions.isEmpty()) { + // request all missing permissions + final String[] permissions = missingPermissions + .toArray(new String[0]); + ActivityCompat.requestPermissions(this, permissions, REQUEST_CODE_ASK_PERMISSIONS); + } else { + final int[] grantResults = new int[REQUIRED_SDK_PERMISSIONS.length]; + Arrays.fill(grantResults, PackageManager.PERMISSION_GRANTED); + onRequestPermissionsResult(REQUEST_CODE_ASK_PERMISSIONS, REQUIRED_SDK_PERMISSIONS, + grantResults); + } + } + + @Override + public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, + @NonNull int[] grantResults) { + if (requestCode == REQUEST_CODE_ASK_PERMISSIONS) { + for (int index = permissions.length - 1; index >= 0; --index) { + if (grantResults[index] != PackageManager.PERMISSION_GRANTED) { + // exit the app if one permission is not granted + Toast.makeText(this, "Required permission '" + permissions[index] + + "' not granted, exiting", Toast.LENGTH_LONG).show(); + finish(); + return; + } + } + // all permissions were granted + Intent intent = new Intent(this, CameraActivity.class); + intent.addFlags(Intent.FLAG_ACTIVITY_NO_ANIMATION | Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK); + startActivity(intent); + } + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/ManualFocusConfig.java b/app/src/main/java/io/a3dv/VIRec/ManualFocusConfig.java new file mode 100644 index 0000000..d83c7d7 --- /dev/null +++ 
/**
 * Immutable value object describing a manual-focus tap: the touch coordinates
 * and the dimensions of the view that received the touch.
 */
public class ManualFocusConfig {
    final float mEventX;
    final float mEventY;
    final int mViewWidth;
    final int mViewHeight;

    /**
     * @param eventX     horizontal touch coordinate within the view
     * @param eventY     vertical touch coordinate within the view
     * @param viewWidth  width of the touched view in pixels
     * @param viewHeight height of the touched view in pixels
     */
    public ManualFocusConfig(float eventX, float eventY, int viewWidth, int viewHeight) {
        this.mEventX = eventX;
        this.mEventY = eventY;
        this.mViewWidth = viewWidth;
        this.mViewHeight = viewHeight;
    }

    /** Renders as {@code ManualFocusConfig: WxH @ x,y} for logging. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("ManualFocusConfig: ");
        sb.append(mViewWidth).append('x').append(mViewHeight)
                .append(" @ ").append(mEventX).append(',').append(mEventY);
        return sb.toString();
    }
}
/**
 * Preference screen for camera selection and capture settings. Populates the
 * camera, resolution, ISO, and exposure-time preferences from the device's
 * Camera2 characteristics, and keeps the two camera pickers from selecting the
 * same camera.
 */
public class SettingsFragment extends PreferenceFragmentCompat
        implements SharedPreferences.OnSharedPreferenceChangeListener {
    /**
     * Checks that a preference is a valid numerical value.
     */
    Preference.OnPreferenceChangeListener checkISOListener = (preference, newValue) -> {
        //Check that the string is an integer.
        return checkIso(newValue);
    };

    public SettingsFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    /**
     * Builds the preference UI: inflates R.xml.settings, registers this
     * fragment for preference-change callbacks, then fills the camera and
     * resolution lists (and the ISO / exposure dialog hints) by querying the
     * Camera2 API.
     */
    @Override
    public void onCreatePreferences(Bundle savedInstanceState, String rootKey) {
        setPreferencesFromResource(R.xml.settings, rootKey);

        // Listen for changes so dependent preferences (e.g. resolution list)
        // can be refreshed when the selected camera changes.
        PreferenceManager.getDefaultSharedPreferences(
                getActivity()).registerOnSharedPreferenceChangeListener(this);

        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(
                getActivity());

        ListPreference cameraList = getPreferenceManager().findPreference("prefCamera");
        ListPreference cameraList2 = getPreferenceManager().findPreference("prefCamera2");
        ListPreference cameraRez = getPreferenceManager().findPreference("prefSizeRaw");
//        ListPreference cameraFocus = (ListPreference)
//                getPreferenceManager().findPreference("prefFocusDistance");

        EditTextPreference prefISO = getPreferenceScreen().findPreference("prefISO");
        EditTextPreference prefExposureTime = getPreferenceScreen().findPreference("prefExposureTime");

        assert prefISO != null;
        prefISO.setOnPreferenceChangeListener(checkISOListener);

        try {
            Activity activity = getActivity();
            CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
            // Build one entry per camera id, labeled by lens facing.
            int cameraSize = manager.getCameraIdList().length;
            CharSequence[] entries = new CharSequence[cameraSize];
            CharSequence[] entriesValues = new CharSequence[cameraSize];
            for (int i = 0; i < cameraSize; i++) {
                String cameraId = manager.getCameraIdList()[i];
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                try {
                    // LENS_FACING may be null on some devices, hence the
                    // NullPointerException fallback below.
                    if (characteristics.get(CameraCharacteristics.LENS_FACING) ==
                            CameraMetadata.LENS_FACING_BACK) {
                        entries[i] = cameraId + " - Lens Facing Back";
                    } else if (characteristics.get(CameraCharacteristics.LENS_FACING) ==
                            CameraMetadata.LENS_FACING_FRONT) {
                        entries[i] = cameraId + " - Lens Facing Front";
                    } else {
                        entries[i] = cameraId + " - Lens External";
                    }
                } catch (NullPointerException e) {
                    e.printStackTrace();
                    entries[i] = cameraId + " - Lens Facing Unknown";
                }
                entriesValues[i] = cameraId;
            }

            // Update our settings entry
            assert cameraList != null;
            cameraList.setEntries(entries);
            cameraList.setEntryValues(entriesValues);
            cameraList.setDefaultValue(entriesValues[0]);

            // "None" means no stored selection yet; seed with the first camera.
            if (sharedPreferences.getString("prefCamera", "None").equals("None")) {
                cameraList.setValue((String) entriesValues[0]);
            }

            assert cameraList2 != null;
            if (cameraSize > 1) {
                cameraList2.setEntries(entries);
                cameraList2.setEntryValues(entriesValues);
                cameraList2.setDefaultValue(entriesValues[1]);

                if (sharedPreferences.getString("prefCamera2", "None").equals("None")) {
                    cameraList2.setValue((String) entriesValues[1]);
                }
            } else {
                // Only one camera: the second-camera picker is meaningless.
                cameraList2.setEnabled(false);
            }

            // Do not call "cameraList.setValueIndex(0)" which will invoke onSharedPreferenceChanged
            // if the previous camera is not 0, and cause null pointer exception.

            // Right now we have selected the first camera, so lets populate the resolution list
            // We should just use the default if there is not a shared setting yet
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(
                    sharedPreferences.getString("prefCamera", entriesValues[0].toString()));
            StreamConfigurationMap streamConfigurationMap = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            Size[] sizes = streamConfigurationMap.getOutputSizes(MediaRecorder.class);

            int rezSize = sizes.length;
            CharSequence[] rez = new CharSequence[rezSize];
            CharSequence[] rezValues = new CharSequence[rezSize];
            int defaultIndex = 0;
            for (int i = 0; i < sizes.length; i++) {
                rez[i] = sizes[i].getWidth() + "x" + sizes[i].getHeight();
                rezValues[i] = sizes[i].getWidth() + "x" + sizes[i].getHeight();
                // Pick the size matching the desired recording resolution as
                // the default (compared via width+height sum, as elsewhere).
                if (sizes[i].getWidth() + sizes[i].getHeight() ==
                        DesiredCameraSetting.mDesiredFrameWidth +
                                DesiredCameraSetting.mDesiredFrameHeight) {
                    defaultIndex = i;
                }
            }

            assert cameraRez != null;
            cameraRez.setEntries(rez);
            cameraRez.setEntryValues(rezValues);
            cameraRez.setDefaultValue(rezValues[defaultIndex]);

            // Show the sensor's supported ISO range in the dialog title.
            Range<Integer> isoRange = characteristics.get(
                    CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
            if (isoRange != null) {
                String rangeStr = "[" + isoRange.getLower() + "," + isoRange.getUpper() + "] (1)";
                prefISO.setDialogTitle("Adjust ISO in range " + rangeStr);
            }

            // Exposure range is reported in nanoseconds; show it in ms.
            Range<Long> exposureTimeRangeNs = characteristics.get(
                    CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
            if (exposureTimeRangeNs != null) {
                Range<Float> exposureTimeRangeMs = new Range<>(
                        (float) (exposureTimeRangeNs.getLower().floatValue() / 1e6),
                        (float) (exposureTimeRangeNs.getUpper().floatValue() / 1e6));
                String rangeStr = "[" + exposureTimeRangeMs.getLower() + "," +
                        exposureTimeRangeMs.getUpper() + "] (ms)";
                assert prefExposureTime != null;
                prefExposureTime.setDialogTitle("Adjust exposure time in range " + rangeStr);
            }

            // Get the possible focus lengths, on non-optical devices this only has one value
            // https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#LENS_INFO_AVAILABLE_FOCAL_LENGTHS
//            float[] focus_lengths = characteristics.get(
//                    CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
//            CharSequence[] focuses = new CharSequence[focus_lengths.length];
//            for (int i = 0; i < focus_lengths.length; i++) {
//                focuses[i] = focus_lengths[i] + "";
//            }
        } catch (CameraAccessException | NullPointerException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns true when {@code newValue} is a non-empty string of digits;
     * otherwise shows a toast and returns false (rejecting the change).
     */
    private boolean checkIso(Object newValue) {
        if (!newValue.toString().equals("") && newValue.toString().matches("\\d*")) {
            return true;
        } else {
            Toast.makeText(getActivity(),
                    newValue + " is not a valid number!", Toast.LENGTH_SHORT).show();
            return false;
        }
    }

    /**
     * Moves the list preference stored under {@code SecondaryKey} off the
     * camera id {@code main}, so the two camera pickers never point at the
     * same camera: picks the next entry value, or the previous one when
     * {@code main} is already the last entry.
     */
    private void switchPrefCameraValues(String main, String SecondaryKey) {
        ListPreference list = getPreferenceManager().findPreference(SecondaryKey);
        assert list != null;
        CharSequence[] entryValues = list.getEntryValues();
        int index = Arrays.asList(entryValues).indexOf(main);

        if (index + 1 < entryValues.length) {
            list.setValue((String) entryValues[index + 1]);
        } else {
            list.setValue((String) entryValues[index - 1]);
        }
    }

    /**
     * Keeps the two camera selections distinct and, when the primary camera
     * changes, repopulates the resolution list for the newly selected camera.
     */
    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        if (key.equals("prefCamera")) {
            try {
                String cameraId = sharedPreferences.getString("prefCamera", "0");
                String camera2Id = sharedPreferences.getString("prefCamera2", "1");

                // Never let both pickers reference the same camera.
                if (cameraId.equals(camera2Id)) {
                    switchPrefCameraValues(cameraId, "prefCamera2");
                }

                Activity activity = getActivity();
                CameraManager manager = (CameraManager)
                        activity.getSystemService(Context.CAMERA_SERVICE);

                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                StreamConfigurationMap streamConfigurationMap = characteristics.get(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                Size[] sizes = streamConfigurationMap.getOutputSizes(MediaRecorder.class);

                int rezSize = sizes.length;
                CharSequence[] rez = new CharSequence[rezSize];
                CharSequence[] rezValues = new CharSequence[rezSize];
                int defaultIndex = 0;
                for (int i = 0; i < sizes.length; i++) {
                    rez[i] = sizes[i].getWidth() + "x" + sizes[i].getHeight();
                    rezValues[i] = sizes[i].getWidth() + "x" + sizes[i].getHeight();
                    if (sizes[i].getWidth() + sizes[i].getHeight() ==
                            DesiredCameraSetting.mDesiredFrameWidth +
                                    DesiredCameraSetting.mDesiredFrameHeight) {
                        defaultIndex = i;
                    }
                }

                ListPreference cameraRez = getPreferenceManager().findPreference("prefSizeRaw");

                assert cameraRez != null;
                cameraRez.setEntries(rez);
                cameraRez.setEntryValues(rezValues);
                cameraRez.setValueIndex(defaultIndex);

//                float[] focus_lengths = characteristics.get(
//                        CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
//                CharSequence[] focuses = new CharSequence[focus_lengths.length];
//                for (int i = 0; i < focus_lengths.length; i++) {
//                    focuses[i] = focus_lengths[i] + "";
//                }
            } catch (CameraAccessException | NullPointerException e) {
                e.printStackTrace();
            }
        } else if (key.equals("prefCamera2")) {
            String cameraId = sharedPreferences.getString("prefCamera", "0");
            String camera2Id = sharedPreferences.getString("prefCamera2", "1");

            if (cameraId.equals(camera2Id)) {
                switchPrefCameraValues(camera2Id, "prefCamera");
            }
        }
    }

    @Override
    public void onResume() {
        super.onResume();
    }

    @Override
    public void onAttach(@NonNull Context context) {
        super.onAttach(context);
        // The hosting activity must implement the interaction interface.
        if (!(context instanceof OnFragmentInteractionListener)) {
            throw new RuntimeException(context.toString()
                    + " must implement OnFragmentInteractionListener");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
    }

    /** Marker interface the hosting activity must implement. */
    public interface OnFragmentInteractionListener {
    }
}
/*
 * Copyright 2013 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Encode a movie from frames rendered from an external texture image.
 * <p>
 * The object wraps an encoder running on a dedicated thread. The various control messages
 * may be sent from arbitrary threads (typically the app UI thread). The encoder thread
 * manages both sides of the encoder (feeding and draining); the only external input is
 * the GL texture.
 * <p>
 * The design is complicated slightly by the need to create an EGL context that shares state
 * with a view that gets restarted if (say) the device orientation changes. When the view
 * in question is a GLSurfaceView, we don't have full control over the EGL context creation
 * on that side, so we have to bend a bit backwards here.
 * <p>
 * To use:
 * <ul>
 * <li>create a TextureMovieEncoder object
 * <li>create an EncoderConfig
 * <li>call TextureMovieEncoder#startRecording() with the config
 * <li>call TextureMovieEncoder#setTextureId() with the texture object that receives frames
 * <li>for each frame, after latching it with SurfaceTexture#updateTexImage(),
 *     call TextureMovieEncoder#frameAvailable()
 * </ul>
 */
public class TextureMovieEncoder implements Runnable {
    private static final boolean VERBOSE = false;

    // Message types handled by EncoderHandler on the encoder thread.
    private static final int MSG_START_RECORDING = 0;
    private static final int MSG_STOP_RECORDING = 1;
    private static final int MSG_FRAME_AVAILABLE = 2;
    private static final int MSG_SET_TEXTURE_ID = 3;
    private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
    private static final int MSG_QUIT = 5;

    // ----- accessed exclusively by encoder thread -----
    private WindowSurface mInputWindowSurface;
    private EglCore mEglCore;
    private FullFrameRect mFullScreen;
    private int mTextureId;
    private int mFrameNum;  // reset at start of each recording; currently only reset, not read
    private VideoEncoderCore mVideoEncoder;

    // ----- accessed by multiple threads -----
    private volatile EncoderHandler mHandler;

    private final Object mReadyFence = new Object(); // guards ready/running
    private boolean mReady;    // encoder thread has a Looper/Handler and can accept messages
    private boolean mRunning;  // encoder thread has been started
    private Long mLastFrameTimeNs = null;  // timestamp of previous frame, for FPS estimate
    public Float mFrameRate = 15.f;        // exponentially-smoothed FPS estimate
    private final float[] STMatrix = new float[16];  // scratch texture transform from SurfaceTexture

    /**
     * Encoder configuration.
     * <p>
     * Object is immutable, which means we can safely pass it between threads without
     * explicit synchronization (and don't need to worry about it getting tweaked out from
     * under us).
     */
    public static class EncoderConfig {
        final String mOutputFile;
        final int mWidth;
        final int mHeight;
        final int mBitRate;
        final EGLContext mEglContext;
        final Texture2dProgram mProgram;
        final String mMetadataFile;

        /**
         * @param outputFile       path the encoded movie is written to
         * @param width            frame width in pixels
         * @param height           frame height in pixels
         * @param bitRate          target video bit rate in bits/second
         * @param sharedEglContext EGL context to share textures with (the display context)
         * @param program          GL program used to render the texture into the encoder surface
         * @param metaFile         path for per-frame metadata output
         */
        public EncoderConfig(String outputFile, int width, int height, int bitRate,
                             EGLContext sharedEglContext, Texture2dProgram program, String metaFile) {
            mOutputFile = outputFile;
            mWidth = width;
            mHeight = height;
            mBitRate = bitRate;
            mEglContext = sharedEglContext;
            mProgram = program;
            mMetadataFile = metaFile;
        }

        @Override
        public String toString() {
            return "EncoderConfig: " + mWidth + "x" + mHeight + " @" + mBitRate +
                    " to '" + mOutputFile + "' ctxt=" + mEglContext;
        }
    }

    /**
     * Tells the video recorder to start recording. (Call from non-encoder thread.)
     * <p>
     * Creates a new thread, which will create an encoder using the provided configuration.
     * <p>
     * Returns after the recorder thread has started and is ready to accept Messages. The
     * encoder may not yet be fully configured.
     */
    public void startRecording(EncoderConfig config) {
        Timber.d("Encoder: startRecording()");
        synchronized (mReadyFence) {
            if (mRunning) {
                Timber.w("Encoder thread already running");
                return;
            }
            mRunning = true;

            new Thread(this, "TextureMovieEncoder").start();

            // Block until run() has prepared the Looper/Handler; otherwise the
            // sendMessage below could hit a null mHandler.
            while (!mReady) {
                try {
                    mReadyFence.wait();
                } catch (InterruptedException ie) {
                    // ignore
                }
            }
        }

        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
    }

    /**
     * Tells the video recorder to stop recording. (Call from non-encoder thread.)
     * <p>
     * Returns immediately; the encoder/muxer may not yet be finished creating the movie,
     * so we can provide reasonable status UI (and let the caller know that movie encoding
     * has completed).
     */
    public void stopRecording() {
        mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
        mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
        // We don't know when these will actually finish (or even start).  We don't want to
        // delay the UI thread though, so we return immediately.
    }

    /**
     * Returns true if recording has been started.
     */
    public boolean isRecording() {
        synchronized (mReadyFence) {
            return mRunning;
        }
    }

    /**
     * Tells the video recorder to refresh its EGL surface. (Call from non-encoder thread.)
     */
    public void updateSharedContext(EGLContext sharedContext) {
        mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
    }

    /**
     * Tells the video recorder that a new frame is available. (Call from non-encoder thread.)
     * <p>
     * This function sends a message and returns immediately. This isn't sufficient -- we
     * don't want the caller to latch a new frame until we're done with this one -- but we
     * can get away with it so long as the input frame rate is reasonable and the encoder
     * thread doesn't stall. An alternative would be to have this method block, or have a
     * separate "block if still busy" method that the caller can execute immediately before
     * it calls updateTexImage(). The latter is preferred because we don't want to stall
     * the caller while this thread does work.
     */
    public void frameAvailable(SurfaceTexture st) {
        synchronized (mReadyFence) {
            if (!mReady) {
                return;
            }
        }

        st.getTransformMatrix(STMatrix);
        long timestamp = st.getTimestamp();
        if (timestamp == 0) {
            // Seeing this after device is toggled off/on with power button.  The
            // first frame back has a zero timestamp.
            //
            // MPEG4Writer thinks this is cause to abort() in native code, so it's very
            // important that we just ignore the frame.
            Timber.w("HEY: got SurfaceTexture with timestamp of zero");
            return;
        }

        // The 64-bit timestamp is split across the two 32-bit message args and
        // reassembled in handleMessage().
        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
                (int) (timestamp >> 32), (int) timestamp, STMatrix));
    }

    /**
     * Tells the video recorder what texture name to use. This is the external texture that
     * we're receiving camera previews in. (Call from non-encoder thread.)
     */
    public void setTextureId(int id) {
        synchronized (mReadyFence) {
            if (!mReady) {
                return;
            }
        }
        mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
    }

    /**
     * Encoder thread entry point. Establishes Looper/Handler and waits for messages.
     *
     * @see java.lang.Thread#run()
     */
    @Override
    public void run() {
        // Establish a Looper for this thread, and define a Handler for it.
        Looper.prepare();
        synchronized (mReadyFence) {
            mHandler = new EncoderHandler(this);
            mReady = true;
            mReadyFence.notify();  // wakes the thread blocked in startRecording()
        }
        Looper.loop();

        Timber.d("Encoder thread exiting");
        synchronized (mReadyFence) {
            mReady = mRunning = false;
            mHandler = null;
        }
    }


    /**
     * Handles encoder state change requests. The handler is created on the encoder thread.
     */
    private static class EncoderHandler extends Handler {
        // Weak reference so a queued message cannot keep the encoder (and its
        // EGL/codec resources) alive after it is otherwise unreachable.
        private final WeakReference<TextureMovieEncoder> mWeakEncoder;

        public EncoderHandler(TextureMovieEncoder encoder) {
            mWeakEncoder = new WeakReference<>(encoder);
        }

        @Override  // runs on encoder thread
        public void handleMessage(Message inputMessage) {
            int what = inputMessage.what;
            Object obj = inputMessage.obj;

            TextureMovieEncoder encoder = mWeakEncoder.get();
            if (encoder == null) {
                Timber.w("EncoderHandler.handleMessage: encoder is null");
                return;
            }

            switch (what) {
                case MSG_START_RECORDING:
                    encoder.handleStartRecording((EncoderConfig) obj);
                    break;
                case MSG_STOP_RECORDING:
                    encoder.handleStopRecording();
                    break;
                case MSG_FRAME_AVAILABLE:
                    // Reassemble the 64-bit timestamp from the two 32-bit args.
                    long timestamp = (((long) inputMessage.arg1) << 32) |
                            (((long) inputMessage.arg2) & 0xffffffffL);
                    encoder.handleFrameAvailable((float[]) obj, timestamp);
                    break;
                case MSG_SET_TEXTURE_ID:
                    encoder.handleSetTexture(inputMessage.arg1);
                    break;
                case MSG_UPDATE_SHARED_CONTEXT:
                    encoder.handleUpdateSharedContext((EGLContext) inputMessage.obj);
                    break;
                case MSG_QUIT:
                    Looper.myLooper().quit();
                    break;
                default:
                    throw new RuntimeException("Unhandled msg what=" + what);
            }
        }
    }

    /**
     * Starts recording: creates the encoder, EGL surface, and GL program.
     * (Runs on the encoder thread.)
     */
    private void handleStartRecording(EncoderConfig config) {
        Timber.d("handleStartRecording %s", config.toString());
        mFrameNum = 0;
        prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate,
                config.mOutputFile, config.mProgram, config.mMetadataFile);
    }

    /**
     * Handles notification of an available frame: drains pending encoder output,
     * renders the texture onto the encoder's input surface, and submits it with
     * the frame's original timestamp. Also maintains the smoothed frame-rate
     * estimate in {@link #mFrameRate}. (Runs on the encoder thread.)
     *
     * @param transform      The texture transform, from SurfaceTexture.
     * @param timestampNanos The frame's timestamp, from SurfaceTexture.
     */
    private void handleFrameAvailable(float[] transform, long timestampNanos) {
        if (VERBOSE) Timber.d("handleFrameAvailable tr=%f", transform[0]);
        mVideoEncoder.drainEncoder(false);
        mFullScreen.drawFrame(mTextureId, transform);

        mInputWindowSurface.setPresentationTime(timestampNanos);
        mInputWindowSurface.swapBuffers();

        // Exponential moving average of instantaneous FPS (weights 0.3 / 0.7).
        if (mLastFrameTimeNs != null) {
            long gapNs = timestampNanos - mLastFrameTimeNs;
            mFrameRate = mFrameRate * 0.3f + (float) (1000000000.0 / gapNs * 0.7);
        }
        mLastFrameTimeNs = timestampNanos;
    }

    /**
     * Handles a request to stop encoding: signals end-of-stream to the codec,
     * drains remaining output, and releases all resources. (Runs on the encoder thread.)
     */
    private void handleStopRecording() {
        Timber.d("handleStopRecording");
        mVideoEncoder.drainEncoder(true);
        releaseEncoder();
    }

    /**
     * Sets the texture name that SurfaceTexture will use when frames are received.
     * (Runs on the encoder thread.)
     */
    private void handleSetTexture(int id) {
        mTextureId = id;
    }

    /**
     * Tears down the EGL surface and context we've been using to feed the MediaCodec input
     * surface, and replaces it with a new one that shares with the new context.
     * <p>
     * This is useful if the old context we were sharing with went away (maybe a GLSurfaceView
     * that got torn down) and we need to hook up with the new one.
     */
    private void handleUpdateSharedContext(EGLContext newSharedContext) {
        Timber.d("handleUpdatedSharedContext %s", newSharedContext.toString());

        // Release the EGLSurface and EGLContext.
        mInputWindowSurface.releaseEglSurface();
        mFullScreen.release(false);
        mEglCore.release();

        // Create a new EGLContext and recreate the window surface.
        mEglCore = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
        mInputWindowSurface.recreate(mEglCore);
        mInputWindowSurface.makeCurrent();

        // Create new programs and such for the new context.
        mFullScreen = new FullFrameRect(
                new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));
    }

    /**
     * Creates the video encoder core and an EGL window surface bound to the
     * codec's input surface, sharing textures with {@code sharedContext}.
     * (Runs on the encoder thread.)
     */
    private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate,
                                String outputFile, Texture2dProgram program, String metaFile) {
        try {
            mVideoEncoder = new VideoEncoderCore(
                    width, height, bitRate, outputFile, metaFile);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
        mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
        mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
        mInputWindowSurface.makeCurrent();

        // The display-side "FILT_VIEW" program variant is replaced with the
        // plain filtered variant for encoding, copying over its kernel settings.
        if (program.getProgramType() == Texture2dProgram.ProgramType.TEXTURE_EXT_FILT_VIEW) {
            Texture2dProgram newProgram = new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT_FILT);
            newProgram.setKernel(program.getKernel(), program.getColorAdjust());
            mFullScreen = new FullFrameRect(newProgram);
        } else {
            mFullScreen = new FullFrameRect(program);
        }
    }

    /**
     * Releases encoder, EGL surface, GL program, and EGL core, in that order.
     * Safe to call when some members are already null. (Runs on the encoder thread.)
     */
    private void releaseEncoder() {
        mVideoEncoder.release();
        if (mInputWindowSurface != null) {
            mInputWindowSurface.release();
            mInputWindowSurface = null;
        }
        if (mFullScreen != null) {
            mFullScreen.release(false);
            mFullScreen = null;
        }
        if (mEglCore != null) {
            mEglCore.release();
            mEglCore = null;
        }
    }
}
+ + "camera frame timestamps of the attribute CaptureResult.SENSOR_TIMESTAMP\n#" + + "and IMU reading timestamps of the field SensorEvent.timestamp " + + "are on the same timebase CLOCK_BOOTTIME which is " + + "used by elapsedRealtimeNanos().\n#" + + "In this case, no offline sync is necessary.\n#" + + "Otherwise, the camera frame timestamp is " + + "assumed to be on the timebase of CLOCK_MONOTONIC" + + " which is generally used by nanoTime().\n#" + + "In this case, offline sync is usually necessary unless the difference " + + "is really small, e.g., <1000 nanoseconds.\n#" + + "To help sync camera frames to " + + "the IMU offline, the timestamps" + + " according to the two time basis at the start and end" + + " of a recording session are recorded."; + } + + private void setCameraTimestampSource(Integer timestampSource) { + String warn_msg = "The camera timestamp source is unreliable to synchronize with motion sensors"; + String src_type = "SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN"; + if (timestampSource != null) { + if (timestampSource == CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN) { + src_type = "SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN"; + Timber.d("%s:%s", warn_msg, src_type); + } else if (timestampSource == CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) { + src_type = "SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME"; + } else { + src_type = "SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN (" + timestampSource + ")"; + Timber.d("%s:%s", warn_msg, src_type); + } + } + createHeader(src_type); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/VideoEncoderCore.java b/app/src/main/java/io/a3dv/VIRec/VideoEncoderCore.java new file mode 100644 index 0000000..fc549eb --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/VideoEncoderCore.java @@ -0,0 +1,243 @@ +package io.a3dv.VIRec; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.view.Surface; + +import 
import androidx.annotation.NonNull;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;

import timber.log.Timber;

/**
 * This class wraps up the core components used for surface-input video encoding.
 *
 * Once created, frames are fed to the input surface. Remember to provide the presentation
 * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
 * producer side doesn't get backed up.
 *
 * This class is not thread-safe, with one exception: it is valid to use the input surface
 * on one thread, and drain the output on a different thread.
 */
public class VideoEncoderCore {
    private static final boolean VERBOSE = false;

    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
    public static final int FRAME_RATE = 30; // 30fps
    private static final int IFRAME_INTERVAL = 1; // seconds between I-frames

    private final Surface mInputSurface;
    private MediaMuxer mMuxer;
    private MediaCodec mEncoder;
    // True if the encoder entered the Executing state (some devices throw on the
    // initial dequeueOutputBuffer probe, e.g. Nexus 9 API 22).
    private boolean mEncoderInExecutingState;
    private final MediaCodec.BufferInfo mBufferInfo;
    private int mTrackIndex;
    private boolean mMuxerStarted;
    private BufferedWriter mFrameMetadataWriter = null;

    /**
     * A (codec presentation time, wall-clock time) pair for one encoded frame,
     * serialized to the frame-metadata CSV on release().
     */
    static class TimePair {
        public Long sensorTimeMicros;
        public long unixTimeMillis;

        public TimePair(Long sensorTime, long unixTime) {
            sensorTimeMicros = sensorTime;
            unixTimeMillis = unixTime;
        }

        @NonNull
        public String toString() {
            // Append zeros to convert microseconds and milliseconds to nanoseconds
            // without risking long overflow via multiplication.
            String delimiter = ",";
            return sensorTimeMicros + "000" + delimiter + unixTimeMillis + "000000";
        }
    }

    // Fix: was declared as a raw ArrayList, which cannot be iterated as TimePair
    // in release(); restore the element type.
    private final ArrayList<TimePair> mTimeArray;
    final int TIMEOUT_USEC = 10000;

    /**
     * Configures encoder and muxer state, and prepares the input Surface.
     *
     * @param outputFile path for the .mp4 output.
     * @param metaFile   path for the per-frame timestamp CSV.
     * @throws IOException if the encoder or muxer cannot be created.
     */
    public VideoEncoderCore(int width, int height, int bitRate,
                            String outputFile, String metaFile)
            throws IOException {
        mBufferInfo = new MediaCodec.BufferInfo();

        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Timber.d("format: %s", format.toString());

        // Create a MediaCodec encoder, and configure it with our format. Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = mEncoder.createInputSurface();
        mEncoder.start();

        try {
            mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            mEncoderInExecutingState = true;
        } catch (IllegalStateException ise) {
            // This exception occurs with certain devices e.g., Nexus 9 API 22.
            Timber.e(ise);
            mEncoderInExecutingState = false;
        }

        // Create a MediaMuxer. We can't add the video track and start() the muxer here,
        // because our MediaFormat doesn't have the Magic Goodies. These can only be
        // obtained from the encoder after it has started processing data.
        //
        // We're not actually interested in multiplexing audio. We just want to convert
        // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
        mMuxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        mTrackIndex = -1;
        mMuxerStarted = false;

        try {
            mFrameMetadataWriter = new BufferedWriter(new FileWriter(metaFile, false));
        } catch (IOException err) {
            Timber.e(err, "IOException in opening frameMetadataWriter.");
        }
        mTimeArray = new ArrayList<>();
    }

    /**
     * Returns the encoder's input surface.
     */
    public Surface getInputSurface() {
        return mInputSurface;
    }

    /**
     * Releases encoder resources and flushes the frame-metadata file.
     */
    public void release() {
        if (VERBOSE) Timber.d("releasing encoder objects");
        if (mEncoder != null) {
            try {
                mEncoder.stop();
            } catch (IllegalStateException ise) {
                // The codec may already be in an error state; release() below is still safe.
                Timber.e(ise);
            }
            mEncoder.release();
            mEncoder = null;
        }
        if (mMuxer != null) {
            // Fix: MediaMuxer.stop() throws IllegalStateException if start() was never
            // called (e.g., no frame ever reached the encoder), which crashed teardown.
            // Only stop a started muxer, and always release it.
            try {
                if (mMuxerStarted) {
                    mMuxer.stop();
                }
            } catch (IllegalStateException ise) {
                Timber.e(ise);
            } finally {
                mMuxer.release();
                mMuxer = null;
                mMuxerStarted = false;
            }
        }
        if (mFrameMetadataWriter != null) {
            try {
                String frameTimeHeader = "Frame timestamp[nanosec],Unix time[nanosec]\n";
                mFrameMetadataWriter.write(frameTimeHeader);
                for (TimePair value : mTimeArray) {
                    mFrameMetadataWriter.write(value.toString() + "\n");
                }
                mFrameMetadataWriter.flush();
                mFrameMetadataWriter.close();
            } catch (IOException err) {
                Timber.e(err, "IOException in closing frameMetadataWriter.");
            }
            mFrameMetadataWriter = null;
        }
    }

    /**
     * Extracts all pending data from the encoder and forwards it to the muxer.
     *
     * If endOfStream is not set, this returns when there is no more data to drain. If it
     * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
     * Calling this with endOfStream set should be done once, right before stopping the muxer.
     *
     * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
     * not recording audio.
     */
    public void drainEncoder(boolean endOfStream) {
        if (VERBOSE) Timber.d("drainEncoder(%b)", endOfStream);

        if (endOfStream) {
            if (VERBOSE) Timber.d("sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }

        while (mEncoderInExecutingState) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break; // out of while
                } else {
                    if (VERBOSE) Timber.d("no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Timber.d("encoder output format changed: %s", newFormat.toString());

                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (encoderStatus < 0) {
                Timber.w("unexpected result from encoder.dequeueOutputBuffer: %d", encoderStatus);
                // let's ignore it
            } else {
                ByteBuffer encodedData = mEncoder.getOutputBuffer(encoderStatus);
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    if (VERBOSE) Timber.d("ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        throw new RuntimeException("muxer hasn't started");
                    }

                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                    // Record (presentation time, wall clock) for the frame-metadata CSV.
                    mTimeArray.add(new TimePair(mBufferInfo.presentationTimeUs,
                            System.currentTimeMillis()));
                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    if (VERBOSE) {
                        Timber.d("sent %d bytes to muxer, ts=%d",
                                mBufferInfo.size, mBufferInfo.presentationTimeUs);
                    }
                }

                mEncoder.releaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Timber.w("reached end of stream unexpectedly");
                    } else {
                        if (VERBOSE) Timber.d("end of stream reached");
                    }
                    break; // out of while
                }
            }
        }
    }
}

// --- file: app/src/main/java/io/a3dv/VIRec/gles/Drawable2d.java (header) ---

/*
 * Copyright 2014 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.a3dv.VIRec.gles;

import java.nio.FloatBuffer;

/**
 * Base class for stuff we like to draw.
 */
+ */ +public class Drawable2d { + private static final int SIZEOF_FLOAT = 4; + + /** + * Simple equilateral triangle (1.0 per side). Centered on (0,0). + */ + private static final float TRIANGLE_COORDS[] = { + 0.0f, 0.577350269f, // 0 top + -0.5f, -0.288675135f, // 1 bottom left + 0.5f, -0.288675135f // 2 bottom right + }; + private static final float TRIANGLE_TEX_COORDS[] = { + 0.5f, 0.0f, // 0 top center + 0.0f, 1.0f, // 1 bottom left + 1.0f, 1.0f, // 2 bottom right + }; + private static final FloatBuffer TRIANGLE_BUF = + GlUtil.createFloatBuffer(TRIANGLE_COORDS); + private static final FloatBuffer TRIANGLE_TEX_BUF = + GlUtil.createFloatBuffer(TRIANGLE_TEX_COORDS); + + /** + * Simple square, specified as a triangle strip. The square is centered on (0,0) and has + * a size of 1x1. + *

+ * Triangles are 0-1-2 and 2-1-3 (counter-clockwise winding). + */ + private static final float RECTANGLE_COORDS[] = { + -0.5f, -0.5f, // 0 bottom left + 0.5f, -0.5f, // 1 bottom right + -0.5f, 0.5f, // 2 top left + 0.5f, 0.5f, // 3 top right + }; + private static final float RECTANGLE_TEX_COORDS[] = { + 0.0f, 1.0f, // 0 bottom left + 1.0f, 1.0f, // 1 bottom right + 0.0f, 0.0f, // 2 top left + 1.0f, 0.0f // 3 top right + }; + private static final FloatBuffer RECTANGLE_BUF = + GlUtil.createFloatBuffer(RECTANGLE_COORDS); + private static final FloatBuffer RECTANGLE_TEX_BUF = + GlUtil.createFloatBuffer(RECTANGLE_TEX_COORDS); + + /** + * A "full" square, extending from -1 to +1 in both dimensions. When the model/view/projection + * matrix is identity, this will exactly cover the viewport. + *

+ * The texture coordinates are Y-inverted relative to RECTANGLE. (This seems to work out + * right with external textures from SurfaceTexture.) + */ + private static final float FULL_RECTANGLE_COORDS[] = { + -1.0f, -1.0f, // 0 bottom left + 1.0f, -1.0f, // 1 bottom right + -1.0f, 1.0f, // 2 top left + 1.0f, 1.0f, // 3 top right + }; + private static final float FULL_RECTANGLE_TEX_COORDS[] = { + 0.0f, 0.0f, // 0 bottom left + 1.0f, 0.0f, // 1 bottom right + 0.0f, 1.0f, // 2 top left + 1.0f, 1.0f // 3 top right + }; + private static final FloatBuffer FULL_RECTANGLE_BUF = + GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS); + private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = + GlUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS); + + + private FloatBuffer mVertexArray; + private FloatBuffer mTexCoordArray; + private int mVertexCount; + private int mCoordsPerVertex; + private int mVertexStride; + private int mTexCoordStride; + private Prefab mPrefab; + + /** + * Enum values for constructor. + */ + public enum Prefab { + TRIANGLE, RECTANGLE, FULL_RECTANGLE + } + + /** + * Prepares a drawable from a "pre-fabricated" shape definition. + *

+ * Does no EGL/GL operations, so this can be done at any time. + */ + public Drawable2d(Prefab shape) { + switch (shape) { + case TRIANGLE: + mVertexArray = TRIANGLE_BUF; + mTexCoordArray = TRIANGLE_TEX_BUF; + mCoordsPerVertex = 2; + mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; + mVertexCount = TRIANGLE_COORDS.length / mCoordsPerVertex; + break; + case RECTANGLE: + mVertexArray = RECTANGLE_BUF; + mTexCoordArray = RECTANGLE_TEX_BUF; + mCoordsPerVertex = 2; + mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; + mVertexCount = RECTANGLE_COORDS.length / mCoordsPerVertex; + break; + case FULL_RECTANGLE: + mVertexArray = FULL_RECTANGLE_BUF; + mTexCoordArray = FULL_RECTANGLE_TEX_BUF; + mCoordsPerVertex = 2; + mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; + mVertexCount = FULL_RECTANGLE_COORDS.length / mCoordsPerVertex; + break; + default: + throw new RuntimeException("Unknown shape " + shape); + } + mTexCoordStride = 2 * SIZEOF_FLOAT; + mPrefab = shape; + } + + /** + * Returns the array of vertices. + *

+ * To avoid allocations, this returns internal state. The caller must not modify it. + */ + public FloatBuffer getVertexArray() { + return mVertexArray; + } + + /** + * Returns the array of texture coordinates. + *

+ * To avoid allocations, this returns internal state. The caller must not modify it. + */ + public FloatBuffer getTexCoordArray() { + return mTexCoordArray; + } + + /** + * Returns the number of vertices stored in the vertex array. + */ + public int getVertexCount() { + return mVertexCount; + } + + /** + * Returns the width, in bytes, of the data for each vertex. + */ + public int getVertexStride() { + return mVertexStride; + } + + /** + * Returns the width, in bytes, of the data for each texture coordinate. + */ + public int getTexCoordStride() { + return mTexCoordStride; + } + + /** + * Returns the number of position coordinates per vertex. This will be 2 or 3. + */ + public int getCoordsPerVertex() { + return mCoordsPerVertex; + } + + @Override + public String toString() { + if (mPrefab != null) { + return "[Drawable2d: " + mPrefab + "]"; + } else { + return "[Drawable2d: ...]"; + } + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/gles/EglCore.java b/app/src/main/java/io/a3dv/VIRec/gles/EglCore.java new file mode 100644 index 0000000..6a99a75 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/gles/EglCore.java @@ -0,0 +1,373 @@ +/* + * Copyright 2013 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package io.a3dv.VIRec.gles;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;

import timber.log.Timber;

/**
 * Core EGL state (display, context, config).
 *
 * The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
 */
public final class EglCore {
    private static final String TAG = GlUtil.TAG;

    /**
     * Constructor flag: surface must be recordable. This discourages EGL from using a
     * pixel format that cannot be converted efficiently to something usable by the video
     * encoder.
     */
    public static final int FLAG_RECORDABLE = 0x01;

    /**
     * Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this
     * flag, GLES2 is used.
     */
    public static final int FLAG_TRY_GLES3 = 0x02;

    // Android-specific extension (EGL_ANDROID_recordable attribute key).
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;

    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLConfig mEGLConfig = null;
    private int mGlVersion = -1;


    /**
     * Prepares EGL display and context.
     *
     * Equivalent to EglCore(null, 0).
     */
    public EglCore() {
        this(null, 0);
    }

    /**
     * Prepares EGL display and context.
     *
     * @param sharedContext The context to share, or null if sharing is not desired.
     * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
     */
    public EglCore(EGLContext sharedContext, int flags) {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("EGL already set up");
        }

        if (sharedContext == null) {
            sharedContext = EGL14.EGL_NO_CONTEXT;
        }

        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
            mEGLDisplay = null;
            throw new RuntimeException("unable to initialize EGL14");
        }

        // Try to get a GLES3 context, if requested. On failure we silently fall
        // through to the GLES2 path below.
        if ((flags & FLAG_TRY_GLES3) != 0) {
            //Timber.d("Trying GLES 3");
            EGLConfig config = getConfig(flags, 3);
            if (config != null) {
                int[] attrib3_list = {
                        EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
                        EGL14.EGL_NONE
                };
                EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
                        attrib3_list, 0);

                if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
                    //Timber.d("Got GLES 3 config");
                    mEGLConfig = config;
                    mEGLContext = context;
                    mGlVersion = 3;
                }
            }
        }
        if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
            //Timber.d("Trying GLES 2");
            EGLConfig config = getConfig(flags, 2);
            if (config == null) {
                throw new RuntimeException("Unable to find a suitable EGLConfig");
            }
            int[] attrib2_list = {
                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                    EGL14.EGL_NONE
            };
            EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
                    attrib2_list, 0);
            checkEglError("eglCreateContext");
            mEGLConfig = config;
            mEGLContext = context;
            mGlVersion = 2;
        }

        // Confirm with query.
        int[] values = new int[1];
        EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
                values, 0);
        Timber.d("EGLContext created, client version %d", values[0]);
    }

    /**
     * Finds a suitable EGLConfig.
     *
     * @param flags Bit flags from constructor.
     * @param version Must be 2 or 3.
     */
    private EGLConfig getConfig(int flags, int version) {
        int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
        if (version >= 3) {
            renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
        }

        // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
        // doesn't really help. It can also lead to a huge performance hit on glReadPixels()
        // when reading into a GL_RGBA buffer.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                //EGL14.EGL_DEPTH_SIZE, 16,
                //EGL14.EGL_STENCIL_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, renderableType,
                EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
                EGL14.EGL_NONE
        };
        // Overwrite the placeholder pair with EGL_RECORDABLE_ANDROID=1 when requested.
        if ((flags & FLAG_RECORDABLE) != 0) {
            attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
            attribList[attribList.length - 2] = 1;
        }
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            Timber.w("unable to find RGB8888 / %d EGLConfig", version);
            return null;
        }
        return configs[0];
    }

    /**
     * Discards all resources held by this class, notably the EGL context. This must be
     * called from the thread where the context was created.
     *
     * On completion, no context will be current.
     */
    public void release() {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            // Android is unusual in that it uses a reference-counted EGLDisplay. So for
            // every eglInitialize() we need an eglTerminate().
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                    EGL14.EGL_NO_CONTEXT);
            EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
            EGL14.eglReleaseThread();
            EGL14.eglTerminate(mEGLDisplay);
        }

        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        mEGLContext = EGL14.EGL_NO_CONTEXT;
        mEGLConfig = null;
    }

    @Override
    protected void finalize() throws Throwable {
        try {
            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
                // We're limited here -- finalizers don't run on the thread that holds
                // the EGL state, so if a surface or context is still current on another
                // thread we can't fully release it here. Exceptions thrown from here
                // are quietly discarded. Complain in the log file.
                Timber.w("WARNING: EglCore was not explicitly released -- state may be leaked");
                release();
            }
        } finally {
            super.finalize();
        }
    }

    /**
     * Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's
     * still current in a context.
     */
    public void releaseSurface(EGLSurface eglSurface) {
        EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
    }

    /**
     * Creates an EGL surface associated with a Surface.
     *
     * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
     */
    public EGLSurface createWindowSurface(Object surface) {
        if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
            throw new RuntimeException("invalid surface: " + surface);
        }

        // Create a window surface, and attach it to the Surface we received.
        int[] surfaceAttribs = {
                EGL14.EGL_NONE
        };
        EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
                surfaceAttribs, 0);
        checkEglError("eglCreateWindowSurface");
        if (eglSurface == null) {
            throw new RuntimeException("surface was null");
        }
        return eglSurface;
    }

    /**
     * Creates an EGL surface associated with an offscreen buffer.
     */
    public EGLSurface createOffscreenSurface(int width, int height) {
        int[] surfaceAttribs = {
                EGL14.EGL_WIDTH, width,
                EGL14.EGL_HEIGHT, height,
                EGL14.EGL_NONE
        };
        EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
                surfaceAttribs, 0);
        checkEglError("eglCreatePbufferSurface");
        if (eglSurface == null) {
            throw new RuntimeException("surface was null");
        }
        return eglSurface;
    }

    /**
     * Makes our EGL context current, using the supplied surface for both "draw" and "read".
     */
    public void makeCurrent(EGLSurface eglSurface) {
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            // called makeCurrent() before create?
            Timber.d("NOTE: makeCurrent w/o display");
        }
        if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    /**
     * Makes our EGL context current, using the supplied "draw" and "read" surfaces.
     */
    public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            // called makeCurrent() before create?
            Timber.d("NOTE: makeCurrent w/o display");
        }
        if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent(draw,read) failed");
        }
    }

    /**
     * Makes no context current.
     */
    public void makeNothingCurrent() {
        if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                EGL14.EGL_NO_CONTEXT)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    /**
     * Calls eglSwapBuffers. Use this to "publish" the current frame.
     *
     * @return false on failure
     */
    public boolean swapBuffers(EGLSurface eglSurface) {
        return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
    }

    /**
     * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
     */
    public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
        EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
    }

    /**
     * Returns true if our context and the specified surface are current.
     */
    public boolean isCurrent(EGLSurface eglSurface) {
        return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
                eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
    }

    /**
     * Performs a simple surface query.
     */
    public int querySurface(EGLSurface eglSurface, int what) {
        int[] value = new int[1];
        EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
        return value[0];
    }

    /**
     * Queries a string value.
     */
    public String queryString(int what) {
        return EGL14.eglQueryString(mEGLDisplay, what);
    }

    /**
     * Returns the GLES version this context is configured for (currently 2 or 3).
     */
    public int getGlVersion() {
        return mGlVersion;
    }

    /**
     * Writes the current display, context, and surface to the log.
     */
    public static void logCurrent(String msg) {
        EGLDisplay display;
        EGLContext context;
        EGLSurface surface;

        display = EGL14.eglGetCurrentDisplay();
        context = EGL14.eglGetCurrentContext();
        surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
        Timber.i("Current EGL (%s): display=%s, context=%s, surface=%s",
                msg, display.toString(), context.toString(), surface.toString());
    }

    /**
     * Checks for EGL errors. Throws an exception if an error has been raised.
     */
    private void checkEglError(String msg) {
        int error;
        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
        }
    }
}

// --- file: app/src/main/java/io/a3dv/VIRec/gles/EglSurfaceBase.java (header) ---

/*
 * Copyright 2013 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
+ */ + +package io.a3dv.VIRec.gles; + +import android.graphics.Bitmap; +import android.opengl.EGL14; +import android.opengl.EGLSurface; +import android.opengl.GLES20; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +import timber.log.Timber; + +/** + * Common base class for EGL surfaces. + *

+ * There can be multiple surfaces associated with a single context. + */ +public class EglSurfaceBase { + // EglCore object we're associated with. It may be associated with multiple surfaces. + protected EglCore mEglCore; + + private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE; + private int mWidth = -1; + private int mHeight = -1; + + protected EglSurfaceBase(EglCore eglCore) { + mEglCore = eglCore; + } + + /** + * Creates a window surface. + *

+ * @param surface May be a Surface or SurfaceTexture. + */ + public void createWindowSurface(Object surface) { + if (mEGLSurface != EGL14.EGL_NO_SURFACE) { + throw new IllegalStateException("surface already created"); + } + mEGLSurface = mEglCore.createWindowSurface(surface); + + // Don't cache width/height here, because the size of the underlying surface can change + // out from under us (see e.g. HardwareScalerActivity). + //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); + //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); + } + + /** + * Creates an off-screen surface. + */ + public void createOffscreenSurface(int width, int height) { + if (mEGLSurface != EGL14.EGL_NO_SURFACE) { + throw new IllegalStateException("surface already created"); + } + mEGLSurface = mEglCore.createOffscreenSurface(width, height); + mWidth = width; + mHeight = height; + } + + /** + * Returns the surface's width, in pixels. + *

+ * If this is called on a window surface, and the underlying surface is in the process + * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged" + * callback). The size should match after the next buffer swap. + */ + public int getWidth() { + if (mWidth < 0) { + return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); + } else { + return mWidth; + } + } + + /** + * Returns the surface's height, in pixels. + */ + public int getHeight() { + if (mHeight < 0) { + return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); + } else { + return mHeight; + } + } + + /** + * Release the EGL surface. + */ + public void releaseEglSurface() { + mEglCore.releaseSurface(mEGLSurface); + mEGLSurface = EGL14.EGL_NO_SURFACE; + mWidth = mHeight = -1; + } + + /** + * Makes our EGL context and surface current. + */ + public void makeCurrent() { + mEglCore.makeCurrent(mEGLSurface); + } + + /** + * Calls eglSwapBuffers. Use this to "publish" the current frame. + * + */ + public void swapBuffers() { + boolean result = mEglCore.swapBuffers(mEGLSurface); + if (!result) { + Timber.d("WARNING: swapBuffers() failed"); + } + } + + /** + * Sends the presentation time stamp to EGL. + * + * @param nsecs Timestamp, in nanoseconds. + */ + public void setPresentationTime(long nsecs) { + mEglCore.setPresentationTime(mEGLSurface, nsecs); + } + + /** + * Saves the EGL surface to a file. + *

+ * Expects that this object's EGL surface is current. + */ + public void saveFrame(File file) throws IOException { + if (!mEglCore.isCurrent(mEGLSurface)) { + throw new RuntimeException("Expected EGL context/surface is not current"); + } + + // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA + // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap + // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the + // Bitmap "copy pixels" method wants the same format GL provides. + // + // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling + // here often. + // + // Making this even more interesting is the upside-down nature of GL, which means + // our output will look upside down relative to what appears on screen if the + // typical GL conventions are used. + + String filename = file.toString(); + + int width = getWidth(); + int height = getHeight(); + ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4); + buf.order(ByteOrder.LITTLE_ENDIAN); + GLES20.glReadPixels(0, 0, width, height, + GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); + GlUtil.checkGlError("glReadPixels"); + buf.rewind(); + + BufferedOutputStream bos = null; + try { + bos = new BufferedOutputStream(new FileOutputStream(filename)); + Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bmp.copyPixelsFromBuffer(buf); + bmp.compress(Bitmap.CompressFormat.PNG, 90, bos); + bmp.recycle(); + } finally { + if (bos != null) bos.close(); + } + Timber.d("Saved %dx%d frame as '%s'", width, height, filename); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/gles/FullFrameRect.java b/app/src/main/java/io/a3dv/VIRec/gles/FullFrameRect.java new file mode 100644 index 0000000..4928d58 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/gles/FullFrameRect.java @@ -0,0 +1,89 @@ +/* + * Copyright 2014 Google Inc. All rights reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.a3dv.VIRec.gles; + +/** + * This class essentially represents a viewport-sized sprite that will be rendered with + * a texture, usually from an external source like the camera or video decoder. + */ +public class FullFrameRect { + private final Drawable2d mRectDrawable = new Drawable2d(Drawable2d.Prefab.FULL_RECTANGLE); + private Texture2dProgram mProgram; + + /** + * Prepares the object. + * + * @param program The program to use. FullFrameRect takes ownership, and will release + * the program when no longer needed. + */ + public FullFrameRect(Texture2dProgram program) { + mProgram = program; + } + + /** + * Releases resources. + *

+ * This must be called with the appropriate EGL context current (i.e. the one that was + * current when the constructor was called). If we're about to destroy the EGL context, + * there's no value in having the caller make it current just to do this cleanup, so you + * can pass a flag that will tell this function to skip any EGL-context-specific cleanup. + */ + public void release(boolean doEglCleanup) { + if (mProgram != null) { + if (doEglCleanup) { + mProgram.release(); + } + mProgram = null; + } + } + + /** + * Returns the program currently in use. + */ + public Texture2dProgram getProgram() { + return mProgram; + } + + /** + * Changes the program. The previous program will be released. + *

+ * The appropriate EGL context must be current. + */ + public void changeProgram(Texture2dProgram program) { + mProgram.release(); + mProgram = program; + } + + /** + * Creates a texture object suitable for use with drawFrame(). + */ + public int createTextureObject() { + return mProgram.createTextureObject(); + } + + /** + * Draws a viewport-filling rect, texturing it with the specified texture object. + */ + public void drawFrame(int textureId, float[] texMatrix) { + // Use the identity matrix for MVP so our 2x2 FULL_RECTANGLE covers the viewport. + mProgram.draw(GlUtil.IDENTITY_MATRIX, mRectDrawable.getVertexArray(), 0, + mRectDrawable.getVertexCount(), mRectDrawable.getCoordsPerVertex(), + mRectDrawable.getVertexStride(), + texMatrix, mRectDrawable.getTexCoordArray(), textureId, + mRectDrawable.getTexCoordStride()); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/gles/GlUtil.java b/app/src/main/java/io/a3dv/VIRec/gles/GlUtil.java new file mode 100644 index 0000000..9e29839 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/gles/GlUtil.java @@ -0,0 +1,144 @@ +/* + * Copyright 2014 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.a3dv.VIRec.gles; + +import android.opengl.GLES20; +import android.opengl.Matrix; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; + +import timber.log.Timber; + +/** + * Some OpenGL utility functions. 
+ */ +public class GlUtil { + public static final String TAG = "Grafika"; + + /** + * Identity matrix for general use. Don't modify or life will get weird. + */ + public static final float[] IDENTITY_MATRIX; + + static { + IDENTITY_MATRIX = new float[16]; + Matrix.setIdentityM(IDENTITY_MATRIX, 0); + } + + private static final int SIZEOF_FLOAT = 4; + + + private GlUtil() { + } // do not instantiate + + /** + * Creates a new program from the supplied vertex and fragment shaders. + * + * @return A handle to the program, or 0 on failure. + */ + public static int createProgram(String vertexSource, String fragmentSource) { + int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); + if (vertexShader == 0) { + return 0; + } + int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); + if (pixelShader == 0) { + return 0; + } + + int program = GLES20.glCreateProgram(); + checkGlError("glCreateProgram"); + if (program == 0) { + Timber.e("Could not create program"); + } + GLES20.glAttachShader(program, vertexShader); + checkGlError("glAttachShader"); + GLES20.glAttachShader(program, pixelShader); + checkGlError("glAttachShader"); + GLES20.glLinkProgram(program); + int[] linkStatus = new int[1]; + GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); + if (linkStatus[0] != GLES20.GL_TRUE) { + Timber.e("Could not link program: "); + Timber.e(GLES20.glGetProgramInfoLog(program)); + GLES20.glDeleteProgram(program); + program = 0; + } + return program; + } + + /** + * Compiles the provided shader source. + * + * @return A handle to the shader, or 0 on failure. 
+ */ + public static int loadShader(int shaderType, String source) { + int shader = GLES20.glCreateShader(shaderType); + checkGlError("glCreateShader type=" + shaderType); + GLES20.glShaderSource(shader, source); + GLES20.glCompileShader(shader); + int[] compiled = new int[1]; + GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); + if (compiled[0] == 0) { + Timber.e("Could not compile shader %d: %s", + shaderType, GLES20.glGetShaderInfoLog(shader)); + GLES20.glDeleteShader(shader); + shader = 0; + } + return shader; + } + + /** + * Checks to see if a GLES error has been raised. + */ + public static void checkGlError(String op) { + int error = GLES20.glGetError(); + if (error != GLES20.GL_NO_ERROR) { + String msg = op + ": glError 0x" + Integer.toHexString(error); + Timber.e(msg); + throw new RuntimeException(msg); + } + } + + /** + * Checks to see if the location we obtained is valid. GLES returns -1 if a label + * could not be found, but does not set the GL error. + *

+ * Throws a RuntimeException if the location is invalid. + */ + public static void checkLocation(int location, String label) { + if (location < 0) { + throw new RuntimeException("Unable to locate '" + label + "' in program"); + } + } + + /** + * Allocates a direct float buffer, and populates it with the float array data. + */ + public static FloatBuffer createFloatBuffer(float[] coords) { + // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. + ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT); + bb.order(ByteOrder.nativeOrder()); + FloatBuffer fb = bb.asFloatBuffer(); + fb.put(coords); + fb.position(0); + return fb; + } + +} diff --git a/app/src/main/java/io/a3dv/VIRec/gles/OffscreenSurface.java b/app/src/main/java/io/a3dv/VIRec/gles/OffscreenSurface.java new file mode 100644 index 0000000..737fe86 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/gles/OffscreenSurface.java @@ -0,0 +1,39 @@ +/* + * Copyright 2013 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.a3dv.VIRec.gles; + +/** + * Off-screen EGL surface (pbuffer). + *

+ * It's good practice to explicitly release() the surface, preferably from a "finally" block. + */ +public class OffscreenSurface extends EglSurfaceBase { + /** + * Creates an off-screen surface with the specified width and height. + */ + public OffscreenSurface(EglCore eglCore, int width, int height) { + super(eglCore); + createOffscreenSurface(width, height); + } + + /** + * Releases any resources associated with the surface. + */ + public void release() { + releaseEglSurface(); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/gles/Texture2dProgram.java b/app/src/main/java/io/a3dv/VIRec/gles/Texture2dProgram.java new file mode 100644 index 0000000..7999d44 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/gles/Texture2dProgram.java @@ -0,0 +1,374 @@ +/* + * Copyright 2014 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.a3dv.VIRec.gles; + +import android.opengl.GLES11Ext; +import android.opengl.GLES20; + +import java.nio.FloatBuffer; + +import timber.log.Timber; + +/** + * GL program and supporting functions for textured 2D shapes. + */ +public class Texture2dProgram { + public enum ProgramType { + TEXTURE_2D, TEXTURE_EXT, TEXTURE_EXT_BW, TEXTURE_EXT_FILT, TEXTURE_EXT_FILT_VIEW + } + + // Simple vertex shader, used for all programs. 
+ private static final String VERTEX_SHADER = + "uniform mat4 uMVPMatrix;\n" + + "uniform mat4 uTexMatrix;\n" + + "attribute vec4 aPosition;\n" + + "attribute vec4 aTextureCoord;\n" + + "varying vec2 vTextureCoord;\n" + + "void main() {\n" + + " gl_Position = uMVPMatrix * aPosition;\n" + + " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n" + + "}\n"; + + // Simple fragment shader for use with "normal" 2D textures. + private static final String FRAGMENT_SHADER_2D = + "precision mediump float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform sampler2D sTexture;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + + "}\n"; + + // Simple fragment shader for use with external 2D textures (e.g. what we get from + // SurfaceTexture). + private static final String FRAGMENT_SHADER_EXT = + "#extension GL_OES_EGL_image_external : require\n" + + "precision mediump float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform samplerExternalOES sTexture;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + + "}\n"; + + // Fragment shader that converts color to black & white with a simple transformation. + private static final String FRAGMENT_SHADER_EXT_BW = + "#extension GL_OES_EGL_image_external : require\n" + + "precision mediump float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform samplerExternalOES sTexture;\n" + + "void main() {\n" + + " vec4 tc = texture2D(sTexture, vTextureCoord);\n" + + " float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" + + " gl_FragColor = vec4(color, color, color, 1.0);\n" + + "}\n"; + + // Fragment shader with a convolution filter. The upper-left half will be drawn normally, + // the lower-right half will have the filter applied, and a thin red line will be drawn + // at the border. + // + // This is not optimized for performance. Some things that might make this faster: + // - Remove the conditionals. 
They're used to present a half & half view with a red + // stripe across the middle, but that's only useful for a demo. + // - Unroll the loop. Ideally the compiler does this for you when it's beneficial. + // - Bake the filter kernel into the shader, instead of passing it through a uniform + // array. That, combined with loop unrolling, should reduce memory accesses. + public static final int KERNEL_SIZE = 9; + private static final String FRAGMENT_SHADER_EXT_FILT_VIEW = + "#extension GL_OES_EGL_image_external : require\n" + + "#define KERNEL_SIZE " + KERNEL_SIZE + "\n" + + "precision highp float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform samplerExternalOES sTexture;\n" + + "uniform float uKernel[KERNEL_SIZE];\n" + + "uniform vec2 uTexOffset[KERNEL_SIZE];\n" + + "uniform float uColorAdjust;\n" + + "void main() {\n" + + " int i = 0;\n" + + " vec4 sum = vec4(0.0);\n" + + " if (vTextureCoord.x < vTextureCoord.y - 0.005) {\n" + + " for (i = 0; i < KERNEL_SIZE; i++) {\n" + + " vec4 texc = texture2D(sTexture, vTextureCoord + uTexOffset[i]);\n" + + " sum += texc * uKernel[i];\n" + + " }\n" + + " sum += uColorAdjust;\n" + + " } else if (vTextureCoord.x > vTextureCoord.y + 0.005) {\n" + + " sum = texture2D(sTexture, vTextureCoord);\n" + + " } else {\n" + + " sum.r = 1.0;\n" + + " }\n" + + " gl_FragColor = sum;\n" + + "}\n"; + + private static final String FRAGMENT_SHADER_EXT_FILT = + "#extension GL_OES_EGL_image_external : require\n" + + "#define KERNEL_SIZE " + KERNEL_SIZE + "\n" + + "precision highp float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform samplerExternalOES sTexture;\n" + + "uniform float uKernel[KERNEL_SIZE];\n" + + "uniform vec2 uTexOffset[KERNEL_SIZE];\n" + + "uniform float uColorAdjust;\n" + + "void main() {\n" + + " int i = 0;\n" + + " vec4 sum = vec4(0.0);\n" + + " for (i = 0; i < KERNEL_SIZE; i++) {\n" + + " vec4 texc = texture2D(sTexture, vTextureCoord + uTexOffset[i]);\n" + + " sum += texc * uKernel[i];\n" + + " }\n" + + " sum += 
uColorAdjust;\n" + + " gl_FragColor = sum;\n" + + "}\n"; + + private final ProgramType mProgramType; + + // Handles to the GL program and various components of it. + private int mProgramHandle; + private final int muMVPMatrixLoc; + private final int muTexMatrixLoc; + private int muKernelLoc; + private final int muTexOffsetLoc; + private final int muColorAdjustLoc; + private final int maPositionLoc; + private final int maTextureCoordLoc; + + private final int mTextureTarget; + + private final float[] mKernel = new float[KERNEL_SIZE]; + private float[] mTexOffset; + private float mColorAdjust; + + + /** + * Prepares the program in the current EGL context. + */ + public Texture2dProgram(ProgramType programType) { + mProgramType = programType; + + switch (programType) { + case TEXTURE_2D: + mTextureTarget = GLES20.GL_TEXTURE_2D; + mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_2D); + break; + case TEXTURE_EXT: + mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES; + mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT); + break; + case TEXTURE_EXT_BW: + mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES; + mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_BW); + break; + case TEXTURE_EXT_FILT: + mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES; + mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_FILT); + break; + case TEXTURE_EXT_FILT_VIEW: + mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES; + mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_FILT_VIEW); + break; + default: + throw new RuntimeException("Unhandled type " + programType); + } + if (mProgramHandle == 0) { + throw new RuntimeException("Unable to create program"); + } + Timber.d("Created program %d (%s)", mProgramHandle, programType.toString()); + + // get locations of attributes and uniforms + + maPositionLoc = GLES20.glGetAttribLocation(mProgramHandle, "aPosition"); + 
GlUtil.checkLocation(maPositionLoc, "aPosition"); + maTextureCoordLoc = GLES20.glGetAttribLocation(mProgramHandle, "aTextureCoord"); + GlUtil.checkLocation(maTextureCoordLoc, "aTextureCoord"); + muMVPMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uMVPMatrix"); + GlUtil.checkLocation(muMVPMatrixLoc, "uMVPMatrix"); + muTexMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uTexMatrix"); + GlUtil.checkLocation(muTexMatrixLoc, "uTexMatrix"); + muKernelLoc = GLES20.glGetUniformLocation(mProgramHandle, "uKernel"); + if (muKernelLoc < 0) { + // no kernel in this one + muKernelLoc = -1; + muTexOffsetLoc = -1; + muColorAdjustLoc = -1; + } else { + // has kernel, must also have tex offset and color adj + muTexOffsetLoc = GLES20.glGetUniformLocation(mProgramHandle, "uTexOffset"); + GlUtil.checkLocation(muTexOffsetLoc, "uTexOffset"); + muColorAdjustLoc = GLES20.glGetUniformLocation(mProgramHandle, "uColorAdjust"); + GlUtil.checkLocation(muColorAdjustLoc, "uColorAdjust"); + + // initialize default values + setKernel(new float[]{0f, 0f, 0f, 0f, 1f, 0f, 0f, 0f, 0f}, 0f); + setTexSize(256, 256); + } + } + + /** + * Releases the program. + *

+ * The appropriate EGL context must be current (i.e. the one that was used to create + * the program). + */ + public void release() { + Timber.d("deleting program %d", mProgramHandle); + GLES20.glDeleteProgram(mProgramHandle); + mProgramHandle = -1; + } + + /** + * Returns the program type. + */ + public ProgramType getProgramType() { + return mProgramType; + } + + /** + * Creates a texture object suitable for use with this program. + *

+ * On exit, the texture will be bound. + */ + public int createTextureObject() { + int[] textures = new int[1]; + GLES20.glGenTextures(1, textures, 0); + GlUtil.checkGlError("glGenTextures"); + + int texId = textures[0]; + GLES20.glBindTexture(mTextureTarget, texId); + GlUtil.checkGlError("glBindTexture " + texId); + + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, + GLES20.GL_NEAREST); + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, + GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, + GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, + GLES20.GL_CLAMP_TO_EDGE); + GlUtil.checkGlError("glTexParameter"); + + return texId; + } + + /** + * Configures the convolution filter values. + * + * @param values Normalized filter values; must be KERNEL_SIZE elements. + */ + public void setKernel(float[] values, float colorAdj) { + if (values.length != KERNEL_SIZE) { + throw new IllegalArgumentException("Kernel size is " + values.length + + " vs. " + KERNEL_SIZE); + } + System.arraycopy(values, 0, mKernel, 0, KERNEL_SIZE); + mColorAdjust = colorAdj; + // Timber.d("filt kernel: %s, adj=%f", Arrays.toString(mKernel), colorAdj); + } + + public float[] getKernel() { + return mKernel; + } + + public float getColorAdjust() { + return mColorAdjust; + } + + /** + * Sets the size of the texture. This is used to find adjacent texels when filtering. + */ + public void setTexSize(int width, int height) { + float rw = 1.0f / width; + float rh = 1.0f / height; + + // Don't need to create a new array here, but it's syntactically convenient. + mTexOffset = new float[]{ + -rw, -rh, 0f, -rh, rw, -rh, + -rw, 0f, 0f, 0f, rw, 0f, + -rw, rh, 0f, rh, rw, rh + }; + // Timber.d("filt size: %dx%d: %s", width, height, Arrays.toString(mTexOffset)); + } + + /** + * Issues the draw call. 
Does the full setup on every call. + * + * @param mvpMatrix The 4x4 projection matrix. + * @param vertexBuffer Buffer with vertex position data. + * @param firstVertex Index of first vertex to use in vertexBuffer. + * @param vertexCount Number of vertices in vertexBuffer. + * @param coordsPerVertex The number of coordinates per vertex (e.g. x,y is 2). + * @param vertexStride Width, in bytes, of the position data for each vertex (often + * vertexCount * sizeof(float)). + * @param texMatrix A 4x4 transformation matrix for texture coords. (Primarily intended + * for use with SurfaceTexture.) + * @param texBuffer Buffer with vertex texture data. + * @param texStride Width, in bytes, of the texture data for each vertex. + */ + public void draw(float[] mvpMatrix, FloatBuffer vertexBuffer, int firstVertex, + int vertexCount, int coordsPerVertex, int vertexStride, + float[] texMatrix, FloatBuffer texBuffer, int textureId, int texStride) { + GlUtil.checkGlError("draw start"); + + // Select the program. + GLES20.glUseProgram(mProgramHandle); + GlUtil.checkGlError("glUseProgram"); + + // Set the texture. + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(mTextureTarget, textureId); + + // Copy the model / view / projection matrix over. + GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mvpMatrix, 0); + GlUtil.checkGlError("glUniformMatrix4fv"); + + // Copy the texture transformation matrix over. + GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, texMatrix, 0); + GlUtil.checkGlError("glUniformMatrix4fv"); + + // Enable the "aPosition" vertex attribute. + GLES20.glEnableVertexAttribArray(maPositionLoc); + GlUtil.checkGlError("glEnableVertexAttribArray"); + + // Connect vertexBuffer to "aPosition". + GLES20.glVertexAttribPointer(maPositionLoc, coordsPerVertex, + GLES20.GL_FLOAT, false, vertexStride, vertexBuffer); + GlUtil.checkGlError("glVertexAttribPointer"); + + // Enable the "aTextureCoord" vertex attribute. 
+ GLES20.glEnableVertexAttribArray(maTextureCoordLoc); + GlUtil.checkGlError("glEnableVertexAttribArray"); + + // Connect texBuffer to "aTextureCoord". + GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, + GLES20.GL_FLOAT, false, texStride, texBuffer); + GlUtil.checkGlError("glVertexAttribPointer"); + + // Populate the convolution kernel, if present. + if (muKernelLoc >= 0) { + GLES20.glUniform1fv(muKernelLoc, KERNEL_SIZE, mKernel, 0); + GLES20.glUniform2fv(muTexOffsetLoc, KERNEL_SIZE, mTexOffset, 0); + GLES20.glUniform1f(muColorAdjustLoc, mColorAdjust); + } + + // Draw the rect. + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, firstVertex, vertexCount); + GlUtil.checkGlError("glDrawArrays"); + + // Done -- disable vertex array, texture, and program. + GLES20.glDisableVertexAttribArray(maPositionLoc); + GLES20.glDisableVertexAttribArray(maTextureCoordLoc); + GLES20.glBindTexture(mTextureTarget, 0); + GLES20.glUseProgram(0); + } +} diff --git a/app/src/main/java/io/a3dv/VIRec/gles/WindowSurface.java b/app/src/main/java/io/a3dv/VIRec/gles/WindowSurface.java new file mode 100644 index 0000000..3d38a28 --- /dev/null +++ b/app/src/main/java/io/a3dv/VIRec/gles/WindowSurface.java @@ -0,0 +1,90 @@ +/* + * Copyright 2013 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.a3dv.VIRec.gles; + +import android.graphics.SurfaceTexture; +import android.view.Surface; + +/** + * Recordable EGL window surface. + *

+ * It's good practice to explicitly release() the surface, preferably from a "finally" block. + */ +public class WindowSurface extends EglSurfaceBase { + private Surface mSurface; + private boolean mReleaseSurface; + + /** + * Associates an EGL surface with the native window surface. + *

+ * Set releaseSurface to true if you want the Surface to be released when release() is + * called. This is convenient, but can interfere with framework classes that expect to + * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the + * surfaceDestroyed() callback won't fire). + */ + public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) { + super(eglCore); + createWindowSurface(surface); + mSurface = surface; + mReleaseSurface = releaseSurface; + } + + /** + * Associates an EGL surface with the SurfaceTexture. + */ + public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) { + super(eglCore); + createWindowSurface(surfaceTexture); + } + + /** + * Releases any resources associated with the EGL surface (and, if configured to do so, + * with the Surface as well). + *

+ * Does not require that the surface's EGL context be current. + */ + public void release() { + releaseEglSurface(); + if (mSurface != null) { + if (mReleaseSurface) { + mSurface.release(); + } + mSurface = null; + } + } + + /** + * Recreate the EGLSurface, using the new EglBase. The caller should have already + * freed the old EGLSurface with releaseEglSurface(). + *

+ * This is useful when we want to update the EGLSurface associated with a Surface. + * For example, if we want to share with a different EGLContext, which can only + * be done by tearing down and recreating the context. (That's handled by the caller; + * this just creates a new EGLSurface for the Surface we were handed earlier.) + *

+ * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a + * context somewhere, the create call will fail with complaints from the Surface + * about already being connected. + */ + public void recreate(EglCore newEglCore) { + if (mSurface == null) { + throw new RuntimeException("not yet implemented for SurfaceTexture"); + } + mEglCore = newEglCore; // switch to new context + createWindowSurface(mSurface); // create new surface + } +} diff --git a/app/src/main/res/drawable/ic_baseline_fiber_manual_record_24.xml b/app/src/main/res/drawable/ic_baseline_fiber_manual_record_24.xml new file mode 100644 index 0000000..83e0ffd --- /dev/null +++ b/app/src/main/res/drawable/ic_baseline_fiber_manual_record_24.xml @@ -0,0 +1,10 @@ + + + diff --git a/app/src/main/res/drawable/ic_baseline_more_vert_24.xml b/app/src/main/res/drawable/ic_baseline_more_vert_24.xml new file mode 100644 index 0000000..50e1fdc --- /dev/null +++ b/app/src/main/res/drawable/ic_baseline_more_vert_24.xml @@ -0,0 +1,10 @@ + + + diff --git a/app/src/main/res/drawable/ic_baseline_stop_24.xml b/app/src/main/res/drawable/ic_baseline_stop_24.xml new file mode 100644 index 0000000..b422959 --- /dev/null +++ b/app/src/main/res/drawable/ic_baseline_stop_24.xml @@ -0,0 +1,10 @@ + + + diff --git a/app/src/main/res/drawable/rounded_button.xml b/app/src/main/res/drawable/rounded_button.xml new file mode 100644 index 0000000..510dd10 --- /dev/null +++ b/app/src/main/res/drawable/rounded_button.xml @@ -0,0 +1,14 @@ + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable/rounded_transparent_button.xml b/app/src/main/res/drawable/rounded_transparent_button.xml new file mode 100644 index 0000000..8a22d32 --- /dev/null +++ b/app/src/main/res/drawable/rounded_transparent_button.xml @@ -0,0 +1,14 @@ + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/layout/about_activity.xml b/app/src/main/res/layout/about_activity.xml new file mode 100644 index 
0000000..0b81f8a --- /dev/null +++ b/app/src/main/res/layout/about_activity.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + diff --git a/app/src/main/res/layout/camera_activity.xml b/app/src/main/res/layout/camera_activity.xml new file mode 100644 index 0000000..fda26b1 --- /dev/null +++ b/app/src/main/res/layout/camera_activity.xml @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + diff --git a/app/src/main/res/layout/content_preview.xml b/app/src/main/res/layout/content_preview.xml new file mode 100644 index 0000000..9a892af --- /dev/null +++ b/app/src/main/res/layout/content_preview.xml @@ -0,0 +1,123 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/app/src/main/res/layout/imu_fragment.xml b/app/src/main/res/layout/imu_fragment.xml new file mode 100644 index 0000000..91e2a73 --- /dev/null +++ b/app/src/main/res/layout/imu_fragment.xml @@ -0,0 +1,28 @@ + + + + + + + + + + diff --git a/app/src/main/res/layout/imu_list_fragment.xml b/app/src/main/res/layout/imu_list_fragment.xml new file mode 100644 index 0000000..80599e0 --- /dev/null +++ b/app/src/main/res/layout/imu_list_fragment.xml @@ -0,0 +1,13 @@ + + \ No newline at end of file diff --git a/app/src/main/res/layout/main_activity.xml b/app/src/main/res/layout/main_activity.xml new file mode 100644 index 0000000..1a79065 --- /dev/null +++ b/app/src/main/res/layout/main_activity.xml @@ -0,0 +1,6 @@ + + diff --git a/app/src/main/res/layout/menu_intent_activity.xml b/app/src/main/res/layout/menu_intent_activity.xml new file mode 100644 index 0000000..a8fac3b --- /dev/null +++ b/app/src/main/res/layout/menu_intent_activity.xml @@ -0,0 +1,12 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/menu/popup_menu.xml b/app/src/main/res/menu/popup_menu.xml new file mode 100644 index 0000000..b28b4d9 --- /dev/null +++ b/app/src/main/res/menu/popup_menu.xml @@ -0,0 +1,16 @@ + +

+ + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/mipmap-hdpi/ic_launcher.png b/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 0000000..72f27af Binary files /dev/null and b/app/src/main/res/mipmap-hdpi/ic_launcher.png differ diff --git a/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/app/src/main/res/mipmap-hdpi/ic_launcher_round.png new file mode 100644 index 0000000..7c06f4e Binary files /dev/null and b/app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ diff --git a/app/src/main/res/mipmap-mdpi/ic_launcher.png b/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 0000000..e17a789 Binary files /dev/null and b/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/app/src/main/res/mipmap-mdpi/ic_launcher_round.png new file mode 100644 index 0000000..0bcb220 Binary files /dev/null and b/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ diff --git a/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/app/src/main/res/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 0000000..daba726 Binary files /dev/null and b/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ diff --git a/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png new file mode 100644 index 0000000..e50f211 Binary files /dev/null and b/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ diff --git a/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 0000000..a4b0ae5 Binary files /dev/null and b/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ diff --git a/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png new file mode 100644 index 0000000..dea5723 Binary files /dev/null and b/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ diff --git 
a/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png new file mode 100644 index 0000000..dbda5fb Binary files /dev/null and b/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ diff --git a/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png new file mode 100644 index 0000000..d72210e Binary files /dev/null and b/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ diff --git a/app/src/main/res/values/arrays.xml b/app/src/main/res/values/arrays.xml new file mode 100644 index 0000000..3c89f55 --- /dev/null +++ b/app/src/main/res/values/arrays.xml @@ -0,0 +1,16 @@ + + + + Delay Fastest + Delay Game + Delay UI + Delay Normal + + + + 0 + 1 + 2 + 3 + + \ No newline at end of file diff --git a/app/src/main/res/values/colors.xml b/app/src/main/res/values/colors.xml new file mode 100644 index 0000000..09a9e48 --- /dev/null +++ b/app/src/main/res/values/colors.xml @@ -0,0 +1,6 @@ + + + #E02020 + #B01010 + #E02020 + diff --git a/app/src/main/res/values/dimens.xml b/app/src/main/res/values/dimens.xml new file mode 100644 index 0000000..fe995c6 --- /dev/null +++ b/app/src/main/res/values/dimens.xml @@ -0,0 +1,9 @@ + + + 16dp + 16dp + 4dp + 4dp + 16dp + 8dp + diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml new file mode 100644 index 0000000..c5f33ef --- /dev/null +++ b/app/src/main/res/values/strings.xml @@ -0,0 +1,48 @@ + + + + VIRec + Output folder: + [dir name] + Record + Stop + #Snapshot + 0 + GPS is disabled! + Looking for GPS data… + + + Normal + Black & white + Blur + Sharpen + Edge detect + Emboss + + + The source code for the app is available at + About + IMU + Settings + focal length : exposure time + [?] + focal length : exposure time + VIRec is developed by A3DV team for efficiently recording synced dual camera, IMU and GPS data on a mobile device. 
+ Record + Stop + More Options + + \n + All data sessions are saved in \'/Android/data/io.a3dv.VIRec/files/data/\' on the SD card. Each data session in a folder named after the local time at the start of recording has nine files:\n\n + 1. movie.mp4: Main camera frames.\n + 2. movie2.mp4: Secondary camera frames.\n + 3. frame_timestamps.txt: timestamp for each frame of main camera.\n + 4. frame_timestamps2.txt: timestamp for each frame of secondary camera.\n + 5. movie_metadata.csv: Main camera metadata including focal distance, focal length in pixels, etc.\n + 6. movie_metadata2.csv: Secondary camera metadata including focal distance, focal length in pixels, etc.\n + 7. edge_epochs.txt: timestamps at the start and end of recording.\n + 8. gyro_accel.csv: IMU data.\n + 9. location.csv: GPS data.\n + + VIRec v%1$s + diff --git a/app/src/main/res/values/styles.xml b/app/src/main/res/values/styles.xml new file mode 100644 index 0000000..67bd3e8 --- /dev/null +++ b/app/src/main/res/values/styles.xml @@ -0,0 +1,24 @@ + + + + + + + + + diff --git a/app/src/main/res/xml/settings.xml b/app/src/main/res/xml/settings.xml new file mode 100644 index 0000000..b6cf824 --- /dev/null +++ b/app/src/main/res/xml/settings.xml @@ -0,0 +1,70 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/build.gradle b/build.gradle new file mode 100644 index 0000000..79d1d23 --- /dev/null +++ b/build.gradle @@ -0,0 +1,21 @@ +buildscript { + + repositories { + google() + mavenCentral() + } + dependencies { + classpath 'com.android.tools.build:gradle:4.1.3' + } +} + +allprojects { + repositories { + google() + mavenCentral() + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/gradle.properties b/gradle.properties new file mode 100644 index 0000000..dc251f5 --- /dev/null +++ b/gradle.properties @@ -0,0 +1,15 @@ +# Project-wide Gradle settings. +# IDE (e.g. 
Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +org.gradle.jvmargs=-Xmx1536m +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +# org.gradle.parallel=true +android.useAndroidX=true +android.enableJetifier=true \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..7a3265e Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..13a61e3 --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Fri May 28 14:37:21 IRDT 2021 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip diff --git a/gradlew b/gradlew new file mode 100755 index 0000000..cccdd3d --- /dev/null +++ b/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. 
+while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? 
-eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" 
"$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 0000000..e95643d --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. 
+ +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/settings.gradle b/settings.gradle new file mode 100644 index 0000000..e7b4def --- /dev/null +++ b/settings.gradle @@ -0,0 +1 @@ +include ':app'