diff --git a/android/material-showcase/app/build.gradle b/android/material-showcase/app/build.gradle index cd8be74044..1b56c0fb23 100644 --- a/android/material-showcase/app/build.gradle +++ b/android/material-showcase/app/build.gradle @@ -6,7 +6,7 @@ android { compileSdkVersion 31 defaultConfig { applicationId "com.google.mlkit.md" - minSdkVersion 19 + minSdkVersion 21 targetSdkVersion 31 versionCode 1 versionName "1.0" diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt index e78a648183..7d49898936 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt @@ -41,6 +41,7 @@ import com.google.mlkit.md.camera.GraphicOverlay import com.google.mlkit.md.camera.WorkflowModel import com.google.mlkit.md.camera.WorkflowModel.WorkflowState import com.google.mlkit.md.camera.CameraSource +import com.google.mlkit.md.camera.CameraSourceFactory import com.google.mlkit.md.camera.CameraSourcePreview import com.google.mlkit.md.objectdetection.MultiObjectProcessor import com.google.mlkit.md.objectdetection.ProminentObjectProcessor @@ -81,7 +82,7 @@ class CustomModelObjectDetectionActivity : AppCompatActivity(), OnClickListener preview = findViewById(R.id.camera_preview) graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay).apply { setOnClickListener(this@CustomModelObjectDetectionActivity) - cameraSource = CameraSource(this) + cameraSource = CameraSourceFactory.createCameraSource(this) } promptChip = findViewById(R.id.bottom_prompt_chip) promptChipAnimator = @@ -160,15 +161,15 @@ class CustomModelObjectDetectionActivity : AppCompatActivity(), OnClickListener R.id.flash_button -> { if (flashButton?.isSelected == true) { flashButton?.isSelected = false - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_OFF) + cameraSource?.setFlashStatus(false) } else { flashButton?.isSelected = true - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_TORCH) + cameraSource?.setFlashStatus(true) } } R.id.settings_button -> { settingsButton?.isEnabled = false - startActivity(Intent(this, SettingsActivity::class.java)) + startActivity(SettingsActivity.newIntent(this, cameraSource)) } } } @@ -275,11 +276,11 @@ class CustomModelObjectDetectionActivity : AppCompatActivity(), OnClickListener // Observes changes on the object to search, if happens, show detected object labels as // product search results. 
- objectToSearch.observe(this@CustomModelObjectDetectionActivity, Observer { detectObject -> - val productList: List<Product> = detectObject.labels.map { label -> + objectToSearch.observe(this@CustomModelObjectDetectionActivity, Observer { confirmedObject -> + val productList: List<Product> = confirmedObject.labels.map { label -> Product("" /* imageUrl */, label.text, "" /* subtitle */) } - workflowModel?.onSearchCompleted(detectObject, productList) + workflowModel?.onSearchCompleted(confirmedObject, productList) }) // Observes changes on the object that has search completed, if happens, show the bottom sheet diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/InputInfo.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/InputInfo.kt index b18378e097..02daa75994 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/InputInfo.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/InputInfo.kt @@ -17,10 +17,12 @@ package com.google.mlkit.md import android.graphics.Bitmap +import android.media.Image import com.google.mlkit.md.camera.FrameMetadata import java.nio.ByteBuffer interface InputInfo { + // TODO: Make it optional fun getBitmap(): Bitmap } @@ -42,6 +44,22 @@ class CameraInputInfo( } } +class Camera2InputInfo( + private val frameImage: Image, + private val frameRotation: Int +) : InputInfo { + + private var bitmap: Bitmap? = null + + @Synchronized + override fun getBitmap(): Bitmap { + return bitmap ?: let { + bitmap = Utils.convertToBitmap(frameImage, frameRotation) + bitmap!! + } + } +} + class BitmapInputInfo(private val bitmap: Bitmap) : InputInfo { override fun getBitmap(): Bitmap { return bitmap diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt index 1423a8f8cb..f0e562ceee 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt @@ -19,7 +19,6 @@ package com.google.mlkit.md import android.animation.AnimatorInflater import android.animation.AnimatorSet import android.content.Intent -import android.hardware.Camera import android.os.Bundle import android.util.Log import android.view.View @@ -29,17 +28,17 @@ import androidx.lifecycle.Observer import androidx.lifecycle.ViewModelProviders import com.google.android.material.chip.Chip import com.google.common.base.Objects -import com.google.mlkit.md.camera.GraphicOverlay -import com.google.mlkit.md.camera.WorkflowModel -import com.google.mlkit.md.camera.WorkflowModel.WorkflowState -import com.google.mlkit.md.barcodedetection.BarcodeField import com.google.mlkit.md.barcodedetection.BarcodeProcessor +import com.google.mlkit.md.barcodedetection.BarcodeField import com.google.mlkit.md.barcodedetection.BarcodeResultFragment import com.google.mlkit.md.camera.CameraSource import com.google.mlkit.md.camera.CameraSourcePreview +import com.google.mlkit.md.camera.CameraSourceFactory +import com.google.mlkit.md.camera.GraphicOverlay +import com.google.mlkit.md.camera.WorkflowModel +import com.google.mlkit.md.camera.WorkflowModel.WorkflowState import com.google.mlkit.md.settings.SettingsActivity import java.io.IOException -import java.util.ArrayList /** Demonstrates the barcode scanning workflow using camera preview.
*/ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { @@ -61,7 +60,7 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { preview = findViewById(R.id.camera_preview) graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay).apply { setOnClickListener(this@LiveBarcodeScanningActivity) - cameraSource = CameraSource(this) + cameraSource = CameraSourceFactory.createCameraSource(this) } promptChip = findViewById(R.id.bottom_prompt_chip) @@ -115,16 +114,16 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { flashButton?.let { if (it.isSelected) { it.isSelected = false - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_OFF) + cameraSource?.setFlashStatus(false) } else { it.isSelected = true - cameraSource!!.updateFlashMode(Camera.Parameters.FLASH_MODE_TORCH) + cameraSource!!.setFlashStatus(true) } } } R.id.settings_button -> { settingsButton?.isEnabled = false - startActivity(Intent(this, SettingsActivity::class.java)) + startActivity(SettingsActivity.newIntent(this, cameraSource)) } } } @@ -149,12 +148,18 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { if (workflowModel.isCameraLive) { workflowModel.markCameraFrozen() flashButton?.isSelected = false - preview?.stop() + try { + preview?.stop() + } catch (e: Throwable) { + Log.e(TAG, "Failed to stop camera preview", e) + } + } } private fun setUpWorkflowModel() { - workflowModel = ViewModelProviders.of(this).get(WorkflowModel::class.java) + workflowModel = ViewModelProviders.of(this)[WorkflowModel::class.java] // Observes the workflow state changes, if happens, update the overlay view indicators and // camera preview state. @@ -197,13 +202,13 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { } }) - workflowModel?.detectedBarcode?.observe(this, Observer { barcode -> + workflowModel?.detectedBarcode?.observe(this) { barcode -> if (barcode != null) { val barcodeFieldList = ArrayList<BarcodeField>() barcodeFieldList.add(BarcodeField("Raw Value", barcode.rawValue ?: "")) BarcodeResultFragment.show(supportFragmentManager, barcodeFieldList) } - }) + } } companion object { diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt index 453f144883..7b6a84ee60 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt @@ -28,6 +28,7 @@ import android.view.View import android.view.View.OnClickListener import android.widget.ProgressBar import android.widget.TextView +import android.widget.Toast import androidx.appcompat.app.AppCompatActivity import androidx.lifecycle.Observer import androidx.lifecycle.ViewModelProviders @@ -42,6 +43,7 @@ import com.google.mlkit.md.camera.GraphicOverlay import com.google.mlkit.md.camera.WorkflowModel import com.google.mlkit.md.camera.WorkflowModel.WorkflowState import com.google.mlkit.md.camera.CameraSource +import com.google.mlkit.md.camera.CameraSourceFactory import com.google.mlkit.md.camera.CameraSourcePreview import com.google.mlkit.md.objectdetection.MultiObjectProcessor import com.google.mlkit.md.objectdetection.ProminentObjectProcessor @@ -85,7 +87,7 @@ class LiveObjectDetectionActivity : AppCompatActivity(), OnClickListener { preview = findViewById(R.id.camera_preview)
graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay).apply { setOnClickListener(this@LiveObjectDetectionActivity) - cameraSource = CameraSource(this) + cameraSource = CameraSourceFactory.createCameraSource(this) } promptChip = findViewById(R.id.bottom_prompt_chip) promptChipAnimator = @@ -160,15 +162,15 @@ class LiveObjectDetectionActivity : AppCompatActivity(), OnClickListener { R.id.flash_button -> { if (flashButton?.isSelected == true) { flashButton?.isSelected = false - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_OFF) + cameraSource?.setFlashStatus(false) } else { flashButton?.isSelected = true - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_TORCH) + cameraSource?.setFlashStatus(true) } } R.id.settings_button -> { settingsButton?.isEnabled = false - startActivity(Intent(this, SettingsActivity::class.java)) + startActivity(SettingsActivity.newIntent(this, cameraSource)) } } } @@ -274,9 +276,9 @@ class LiveObjectDetectionActivity : AppCompatActivity(), OnClickListener { }) // Observes changes on the object to search, if happens, fire product search request. - objectToSearch.observe(this@LiveObjectDetectionActivity, Observer { detectObject -> - searchEngine!!.search(detectObject) { detectedObject, products -> - workflowModel?.onSearchCompleted(detectedObject, products) + objectToSearch.observe(this@LiveObjectDetectionActivity, Observer { confirmObject -> + searchEngine!!.search(confirmObject) { confirmedObject, products -> + workflowModel?.onSearchCompleted(confirmedObject, products) } }) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/StaticObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/StaticObjectDetectionActivity.kt index 1d23a7fee6..55c07fa2ca 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/StaticObjectDetectionActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/StaticObjectDetectionActivity.kt @@ -40,6 +40,7 @@ import androidx.recyclerview.widget.RecyclerView import com.google.android.material.bottomsheet.BottomSheetBehavior import com.google.android.material.chip.Chip import com.google.common.collect.ImmutableList +import com.google.mlkit.md.objectdetection.ConfirmedObjectInfo import com.google.mlkit.md.productsearch.BottomSheetScrimView import com.google.mlkit.md.objectdetection.DetectedObjectInfo import com.google.mlkit.md.objectdetection.StaticObjectDotView @@ -249,16 +250,16 @@ class StaticObjectDetectionActivity : AppCompatActivity(), View.OnClickListener } else { searchedObjectMap.clear() for (i in objects.indices) { - searchEngine?.search(DetectedObjectInfo(objects[i], i, image)) { detectedObject, products -> - onSearchCompleted(detectedObject, products) + searchEngine?.search(ConfirmedObjectInfo.from(DetectedObjectInfo(objects[i], i, image))) { confirmedObject, products -> + onSearchCompleted(confirmedObject, products) } } } } - private fun onSearchCompleted(detectedObject: DetectedObjectInfo, productList: List<Product>) { - Log.d(TAG, "Search completed for object index: ${detectedObject.objectIndex}") - searchedObjectMap[detectedObject.objectIndex] = SearchedObject(resources, detectedObject, productList) + private fun onSearchCompleted(confirmedObject: ConfirmedObjectInfo, productList: List<Product>) { + Log.d(TAG, "Search completed for object index: ${confirmedObject.objectIndex}") + searchedObjectMap[confirmedObject.objectIndex] = SearchedObject(resources, confirmedObject, productList) if (searchedObjectMap.size <
detectedObjectNum) { // Hold off showing the result until the search of all detected objects completes. return diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt index e89f438451..49e4f1d0b0 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt @@ -25,6 +25,7 @@ import android.content.res.Configuration import android.graphics.Bitmap import android.graphics.BitmapFactory import android.graphics.Canvas +import android.graphics.ImageFormat import android.graphics.Matrix import android.graphics.Paint import android.graphics.PorterDuff @@ -32,20 +33,21 @@ import android.graphics.PorterDuffXfermode import android.graphics.Rect import android.graphics.RectF import android.graphics.YuvImage -import android.hardware.Camera +import android.media.Image import android.net.Uri import android.util.Log import androidx.core.app.ActivityCompat import androidx.core.content.ContextCompat.checkSelfPermission import androidx.exifinterface.media.ExifInterface import com.google.mlkit.md.camera.CameraSizePair +import com.google.mlkit.md.camera.CameraSource import com.google.mlkit.vision.common.InputImage import java.io.ByteArrayOutputStream import java.io.IOException import java.io.InputStream import java.nio.ByteBuffer -import java.util.ArrayList import kotlin.math.abs +import kotlin.math.min /** Utility class to provide helper methods. */ object Utils { @@ -101,10 +103,10 @@ object Utils { * be set to a size that is the same aspect ratio as the preview size we choose. Otherwise, the * preview images may be distorted on some devices. */ - fun generateValidPreviewSizeList(camera: Camera): List<CameraSizePair> { - val parameters = camera.parameters - val supportedPreviewSizes = parameters.supportedPreviewSizes - val supportedPictureSizes = parameters.supportedPictureSizes + fun generateValidPreviewSizeList(cameraSource: CameraSource): List<CameraSizePair> { + + val supportedPreviewSizes = cameraSource.getSupportedPreviewSizes() + val supportedPictureSizes = cameraSource.getSupportedPictureSizes() val validPreviewSizes = ArrayList<CameraSizePair>() for (previewSize in supportedPreviewSizes) { val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() @@ -149,28 +151,104 @@ object Utils { /** Convert NV21 format byte buffer to bitmap. */ fun convertToBitmap(data: ByteBuffer, width: Int, height: Int, rotationDegrees: Int): Bitmap? { - data.rewind() - val imageInBuffer = ByteArray(data.limit()) - data.get(imageInBuffer, 0, imageInBuffer.size) try { - val image = YuvImage( - imageInBuffer, InputImage.IMAGE_FORMAT_NV21, width, height, null - ) - val stream = ByteArrayOutputStream() - image.compressToJpeg(Rect(0, 0, width, height), 80, stream) - val bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size()) - stream.close() - - // Rotate the image back to straight.
- val matrix = Matrix() - matrix.postRotate(rotationDegrees.toFloat()) - return Bitmap.createBitmap(bmp, 0, 0, bmp.width, bmp.height, matrix, true) - } catch (e: java.lang.Exception) { - Log.e(TAG, "Error: " + e.message) + data.rewind() + val imageInBuffer = ByteArray(data.limit()) + data.get(imageInBuffer, 0, imageInBuffer.size) + return convertToBitmapInternal(imageInBuffer, width, height, rotationDegrees) + } catch (e: Exception) { + Log.e(TAG, "Error converting ByteBuffer to bitmap: " + e.message) + } + return null + } + + /** Convert YUV_420_888 format [Image] to bitmap. */ + fun convertToBitmap(image: Image, rotationDegrees: Int): Bitmap? { + try { + return convertToBitmapInternal(yuv_420_888toNv21(image), image.width, image.height, rotationDegrees) + } catch (e: Exception) { + Log.e(TAG, "Error converting Image to bitmap: " + e.message) + } return null } + private fun convertToBitmapInternal(imageData: ByteArray, width: Int, height: Int, rotationDegrees: Int): Bitmap { + val image = YuvImage( + imageData, InputImage.IMAGE_FORMAT_NV21, width, height, null + ) + val stream = ByteArrayOutputStream() + image.compressToJpeg(Rect(0, 0, width, height), 80, stream) + val bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size()) + stream.close() + + // Rotate the image back to straight. + val matrix = Matrix() + matrix.postRotate(rotationDegrees.toFloat()) + return Bitmap.createBitmap(bmp, 0, 0, bmp.width, bmp.height, matrix, true) + } + + @Throws(IllegalArgumentException::class) + private fun yuv_420_888toNv21(image: Image): ByteArray { + require(image.format == ImageFormat.YUV_420_888) { + "Only ImageFormat.YUV_420_888 image conversion is supported" + } + val yPlane = image.planes[0] + val uPlane = image.planes[1] + val vPlane = image.planes[2] + + val yBuffer = yPlane.buffer + val uBuffer = uPlane.buffer + val vBuffer = vPlane.buffer + yBuffer.rewind() + uBuffer.rewind() + vBuffer.rewind() + + val ySize = yBuffer.remaining() + + var position = 0 + // TODO(b/115743986): Pull these bytes from a pool instead of allocating for every image. + val nv21 = ByteArray(ySize + (image.width * image.height / 2)) + + // Add the full y buffer to the array. If rowStride > width, the padding bytes at the end of each row are skipped. + for (row in 0 until image.height) { + yBuffer[nv21, position, image.width] + position += image.width + yBuffer.position( + min(ySize.toDouble(), (yBuffer.position() - image.width + yPlane.rowStride).toDouble()) + .toInt() + ) + } + + val chromaHeight = image.height / 2 + val chromaWidth = image.width / 2 + val vRowStride = vPlane.rowStride + val uRowStride = uPlane.rowStride + val vPixelStride = vPlane.pixelStride + val uPixelStride = uPlane.pixelStride + + // Interleave the u and v frames, filling up the rest of the buffer. Use two line buffers to + // perform faster bulk gets from the byte buffers.
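+ // NV21 stores chroma as interleaved V/U byte pairs (V first) at quarter resolution, so each
+ // 2x2 block of luma pixels shares the one V and one U sample copied per column below.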
+ val vLineBuffer = ByteArray(vRowStride) + val uLineBuffer = ByteArray(uRowStride) + for (row in 0 until chromaHeight) { + vBuffer[vLineBuffer, 0, min(vRowStride.toDouble(), vBuffer.remaining().toDouble()).toInt()] + uBuffer[uLineBuffer, 0, min(uRowStride.toDouble(), uBuffer.remaining().toDouble()).toInt()] + var vLineBufferPosition = 0 + var uLineBufferPosition = 0 + for (col in 0 until chromaWidth) { + nv21[position++] = vLineBuffer[vLineBufferPosition] + nv21[position++] = uLineBuffer[uLineBufferPosition] + vLineBufferPosition += vPixelStride + uLineBufferPosition += uPixelStride + } + } + + return nv21 + } + + internal fun openImagePicker(activity: Activity) { val intent = Intent(Intent.ACTION_GET_CONTENT) intent.addCategory(Intent.CATEGORY_OPENABLE) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/BarcodeProcessor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/BarcodeProcessor.kt index 51bd006f51..63b815cb05 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/BarcodeProcessor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/BarcodeProcessor.kt @@ -20,6 +20,7 @@ import android.animation.ValueAnimator import android.util.Log import androidx.annotation.MainThread import com.google.android.gms.tasks.Task +import com.google.android.odml.image.MlImage import com.google.mlkit.md.InputInfo import com.google.mlkit.md.camera.CameraReticleAnimator import com.google.mlkit.md.camera.GraphicOverlay @@ -39,8 +40,10 @@ class BarcodeProcessor(graphicOverlay: GraphicOverlay, private val workflowModel private val scanner = BarcodeScanning.getClient() private val cameraReticleAnimator: CameraReticleAnimator = CameraReticleAnimator(graphicOverlay) - override fun detectInImage(image: InputImage): Task<List<Barcode>> = - scanner.process(image) + override fun detectInImage(image: MlImage): Task<List<Barcode>> = scanner.process(image) + + @Deprecated("Keeping it only to support Camera API frame processing") + override fun detectInImage(image: InputImage): Task<List<Barcode>> = scanner.process(image) @MainThread override fun onSuccess( @@ -105,7 +108,7 @@ class BarcodeProcessor(graphicOverlay: GraphicOverlay, private val workflowModel } } - override fun onFailure(e: Exception) { + override fun onFailure(e: Exception?) { Log.e(TAG, "Barcode detection failed!", e) } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt new file mode 100644 index 0000000000..9d67a24c3d --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt @@ -0,0 +1,600 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
*/ + +package com.google.mlkit.md.camera + +import android.Manifest +import android.content.Context +import android.content.pm.PackageManager +import android.graphics.ImageFormat +import android.hardware.camera2.* +import android.media.Image +import android.media.ImageReader +import android.os.Handler +import android.os.HandlerThread +import android.util.Log +import android.util.Size +import android.view.Surface +import android.view.SurfaceHolder +import androidx.core.app.ActivityCompat +import com.google.android.odml.image.MediaMlImageBuilder +import com.google.android.odml.image.MlImage +import com.google.mlkit.md.R +import com.google.mlkit.md.Utils +import com.google.mlkit.md.settings.PreferenceUtils +import com.google.mlkit.md.utils.OrientationLiveData +import kotlinx.coroutines.runBlocking +import kotlinx.coroutines.suspendCancellableCoroutine +import kotlinx.coroutines.sync.Mutex +import kotlinx.coroutines.sync.withLock +import java.io.IOException +import java.util.* +import kotlin.coroutines.resume +import kotlin.coroutines.resumeWithException +import kotlin.coroutines.suspendCoroutine +import kotlin.math.abs + +/** + * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics). This + * receives preview frames from the camera at a specified rate, sends those frames to detector as + * fast as it is able to process. + * + * + * This camera source makes a best effort to manage processing on preview frames as fast as + * possible, while at the same time minimizing lag. As such, frames may be dropped if the detector + * is unable to keep up with the rate of frames generated by the camera. + */ + +class Camera2APISource(private val graphicOverlay: GraphicOverlay) : CameraSource() { + + private val context: Context = graphicOverlay.context + + /** Detects, characterizes, and connects to a CameraDevice (used for all camera operations) */ + private val cameraManager: CameraManager by lazy { + getCameraManager(context) + } + + /** [cameraId] corresponding to the back-facing camera */ + private val cameraId: String by lazy { + getCameraId(context) + } + + /** [CameraCharacteristics] corresponding to the provided Camera ID */ + private val characteristics: CameraCharacteristics by lazy { + getCameraCharacteristics(context) + } + + /** The [CameraDevice] that will be used for preview */ + private var camera: CameraDevice? = null + + /** The [ImageReader] that will be used for reading image frame buffers */ + private var imageReader: ImageReader? = null + + /** The [CaptureRequest.Builder] that will be used for the session */ + private var captureRequest: CaptureRequest.Builder? = null + + /** Internal reference to the ongoing [CameraCaptureSession] configured with our parameters */ + private var session: CameraCaptureSession?
= null + + /** [HandlerThread] where all camera operations run */ + private val cameraThread = HandlerThread("CameraThread").apply { start() } + + /** [Handler] corresponding to [cameraThread] */ + private val cameraHandler = Handler(cameraThread.looper) + + /** [HandlerThread] where all buffer reading operations run */ + private val imageReaderThread = HandlerThread("imageReaderThread").apply { start() } + + /** [Handler] corresponding to [imageReaderThread] */ + private val imageReaderHandler = Handler(imageReaderThread.looper) + + /** [OrientationLiveData] that tracks the current device orientation relative to the [camera] and notifies observers of changes */ + private val relativeOrientation: OrientationLiveData by lazy { + OrientationLiveData(context, characteristics) + } + + /** [Observer] for listening to changes in [relativeOrientation] */ + private val orientationObserver = androidx.lifecycle.Observer { rotation -> + Log.d(TAG, "Orientation changed: $rotation") + } + + /** [Size] that is currently in use by the [camera] */ + private var previewSize: Size? = null + + /** [Thread] for detecting & processing [imageReader] frames */ + private var processingThread: Thread? = null + + /** [FrameProcessingRunnable] associated with the [processingThread] */ + private val processingRunnable = FrameProcessingRunnable() + + /** [Object] to lock the [frameProcessor] operations */ + private val processorLock = Object() + + /** [Mutex] to lock the CoroutineScope operations */ + private val mutex = Mutex() + + /** [FrameProcessor] to process the frames received inside [processingRunnable] */ + private var frameProcessor: FrameProcessor? = null + + /** + * Start the camera preview on the provided surface and process images through the image reader buffer. + * + * @param captureRequest the capture request builder to use for the session. + * @param imageReader the image reader for receiving the preview images for processing. + * @param session the configured camera capture session for the camera device. + * + * @throws Exception if the repeating capture request could not be started. + */ + + @Throws(Exception::class) + private fun startPreview(captureRequest: CaptureRequest.Builder, imageReader: ImageReader, session: CameraCaptureSession) { + // This will keep sending the capture request as frequently as possible until the + // session is torn down or session.stopRepeating() is called + session.setRepeatingRequest(captureRequest.build(), null, cameraHandler) + + // Set up a listener for receiving the preview frames for processing + imageReader.setOnImageAvailableListener({ + try { + it.acquireNextImage()?.let { image -> + val rotation = relativeOrientation.value ?: 0 + processingRunnable.setNextFrame(image, rotation) + } + } catch (e: IllegalStateException) { + Log.e(TAG, "Failed to acquire next image: ${e.message}", e) + } + }, imageReaderHandler) + + } + + /** + * Update the camera preview with the changes in the capture request builder. + * + * @param captureRequest the capture request builder to use for the session. + * @param session the configured camera capture session for the camera device. + * + * @throws Exception if the repeating capture request could not be updated.
+ * + * */ + @Throws(Exception::class) + private fun updatePreview(captureRequest: CaptureRequest.Builder, session: CameraCaptureSession) { + session.setRepeatingRequest(captureRequest.build(), null, cameraHandler) + } + + private fun updateFlashMode(enabled: Boolean) { + val flashAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) as Boolean + if (flashAvailable) { + session?.let { session -> + captureRequest?.let { captureRequest -> + captureRequest.set(CaptureRequest.FLASH_MODE, + if (enabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF) + updatePreview(captureRequest, session) + } + } + } + } + + /** + * Opens the camera and applies the user settings. + * + * @throws Exception if camera cannot be found or preview cannot be processed. + */ + @Throws(Exception::class) + private suspend fun createCamera(): CameraDevice = suspendCancellableCoroutine { cont -> + + if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { + if (cont.isActive) cont.resumeWithException(IOException("Camera permission not granted")) + // Bail out here; falling through to openCamera would throw and resume the continuation twice. + return@suspendCancellableCoroutine + } + + cameraManager.openCamera(cameraId, object : CameraDevice.StateCallback() { + override fun onOpened(camera: CameraDevice) = cont.resume(camera) + + override fun onDisconnected(camera: CameraDevice) { + val exc = IOException("Camera $cameraId has been disconnected") + Log.e(TAG, exc.message, exc) + if (cont.isActive) cont.resumeWithException(exc) + } + + override fun onError(camera: CameraDevice, error: Int) { + val msg = when (error) { + ERROR_CAMERA_DEVICE -> "Fatal (device)" + ERROR_CAMERA_DISABLED -> "Device policy" + ERROR_CAMERA_IN_USE -> "Camera in use" + ERROR_CAMERA_SERVICE -> "Fatal (service)" + ERROR_MAX_CAMERAS_IN_USE -> "Maximum cameras in use" + else -> "Unknown" + } + val exc = IOException("Camera $cameraId error: ($error) $msg") + Log.e(TAG, exc.message, exc) + if (cont.isActive) cont.resumeWithException(exc) + } + + }, cameraHandler) + + } + + /** + * Starts a [CameraCaptureSession] and returns the configured session. + * + * @throws Exception if session cannot be created. + */ + @Throws(Exception::class) + private suspend fun createCaptureSession(device: CameraDevice, targets: List<Surface>, handler: Handler? = null): CameraCaptureSession = suspendCoroutine { cont -> + + // Create a capture session using the predefined targets; this also involves defining the + // session state callback to be notified when the session is ready + device.createCaptureSession(targets, object : CameraCaptureSession.StateCallback() { + + override fun onConfigured(session: CameraCaptureSession) = cont.resume(session) + + override fun onConfigureFailed(session: CameraCaptureSession) { + val exc = RuntimeException("Camera ${device.id} session configuration failed") + Log.e(TAG, exc.message, exc) + cont.resumeWithException(exc) + } + }, handler) + } + + /** + * Get the most suitable [CameraSizePair] from an aspect ratio perspective. + * + * @throws Exception if a suitable size cannot be found. + */ + @Throws(Exception::class) + private fun getPreviewAndPictureSize(cameraSource: CameraSource): CameraSizePair { + + // Gives priority to the preview size specified by the user if one exists. + val sizePair: CameraSizePair = PreferenceUtils.getUserSpecifiedPreviewSize(context) ?: run { + // Camera preview size is based on the landscape mode, so we need to also use the aspect + // ratio of the display in the same mode for comparison.
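+ // For example, a 1080x1920 portrait overlay is compared as 1920f / 1080 ≈ 1.78 (16:9) here.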
+ val displayAspectRatioInLandscape: Float = + if (Utils.isPortraitMode(graphicOverlay.context)) { + graphicOverlay.height.toFloat() / graphicOverlay.width + } else { + graphicOverlay.width.toFloat() / graphicOverlay.height + } + selectSizePair(cameraSource, displayAspectRatioInLandscape) + } ?: throw IOException("Could not find suitable preview size.") + + sizePair.preview.let { + Log.v(TAG, "Camera preview size: $it") + PreferenceUtils.saveStringPreference(context, R.string.pref_key_rear_camera_preview_size, it.toString()) + } + + sizePair.picture?.let { pictureSize -> + Log.v(TAG, "Camera picture size: $pictureSize") + PreferenceUtils.saveStringPreference( + context, R.string.pref_key_rear_camera_picture_size, pictureSize.toString() + ) + } + return sizePair + } + + // CameraSource overrides + + override fun getSupportedPreviewSizes(): Array<Size> = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(SurfaceHolder::class.java) + + override fun getSupportedPictureSizes(): Array<Size> = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(IMAGE_FORMAT) + + override fun setFrameProcessor(processor: FrameProcessor) { + graphicOverlay.clear() + synchronized(processorLock) { + frameProcessor?.stop() + frameProcessor = processor + } + } + + override fun setFlashStatus(status: Boolean) = updateFlashMode(status) + + override fun getSelectedPreviewSize() = previewSize + + override fun start(surfaceHolder: SurfaceHolder) { + runBlocking { + mutex.withLock { + + if (camera != null) return@withLock + + camera = createCamera().also { cameraDevice -> + getPreviewAndPictureSize(this@Camera2APISource).preview.let { previewSize -> + imageReader = ImageReader.newInstance(previewSize.width, previewSize.height, IMAGE_FORMAT, IMAGE_BUFFER_SIZE).also { imageReader -> + session = createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), cameraHandler).also { cameraCaptureSession -> + captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply { + addTarget(surfaceHolder.surface) + addTarget(imageReader.surface) + startPreview(this, imageReader, cameraCaptureSession) + } + } + } + } + processingThread = Thread(processingRunnable).apply { + processingRunnable.setActive(true) + start() + } + relativeOrientation.observeForever(orientationObserver) + } + + } + } + } + + override fun stop() { + runBlocking { + mutex.withLock { + Log.d(TAG, "Stop is called") + processingRunnable.setActive(false) + processingThread?.let { + try { + // Waits for the thread to complete to ensure that we can't have multiple threads executing + // at the same time (i.e., which would happen if we called start too quickly after stop). + it.join() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing thread interrupted on stop.") + } + processingThread = null + } + + // Remove the image reader listener and the orientation change observer, since they will no longer be in use.
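+ // Detaching the listener first ensures no new frames are queued while the camera is closing.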
+ imageReader?.let { + it.setOnImageAvailableListener(null, null) + imageReader = null + } + + relativeOrientation.removeObserver(orientationObserver) + + camera?.let { + it.close() + camera = null + } + } + } + } + + override fun release() { + graphicOverlay.clear() + synchronized(processorLock) { + stop() + frameProcessor?.stop() + cameraThread.quitSafely() + imageReaderThread.quitSafely() + } + } + + /** + * This runnable controls access to the underlying receiver, calling it to process frames when + * available from the camera. This is designed to run detection on frames as fast as possible + * (i.e., without unnecessary context switching or waiting on the next frame). + * + * + * While detection is running on a frame, new frames may be received from the camera. As these + * frames come in, the most recent frame is held onto as pending. As soon as detection and its + * associated processing is done for the previous frame, detection on the most recently received + * frame will immediately start on the same thread. + */ + private inner class FrameProcessingRunnable : Runnable { + + // This lock guards all of the member variables below. + private val lock = Object() + private var active = true + + // These pending variables hold the state associated with the new frame awaiting processing. + private var pendingFrame: Image? = null + private var pendingFrameRotation: Int = 0 + + /** Marks the runnable as active/not active. Signals any blocked threads to continue. */ + fun setActive(active: Boolean) { + synchronized(lock) { + this.active = active + lock.notifyAll() + } + } + + /** + * Sets the frame data received from the camera. This closes the previous pending frame (if + * present) and keeps a reference to the new frame for future processing. + */ + fun setNextFrame(image: Image, rotation: Int) { + synchronized(lock) { + pendingFrame?.let { + it.close() + pendingFrame = null + } + + pendingFrame = image + pendingFrameRotation = rotation + + // Notify the processor thread if it is waiting on the next frame (see below). + lock.notifyAll() + } + } + + /** + * As long as the processing thread is active, this executes detection on frames continuously. + * The next pending frame is either immediately available or hasn't been received yet. Once it + * is available, we transfer the frame info to local variables and run detection on that frame. + * It immediately loops back for the next frame without pausing. + * + * + * If detection takes longer than the time in between new frames from the camera, this will + * mean that this loop will run without ever waiting on a frame, avoiding any context switching + * or frame acquisition time latency. + * + * + * If you find that this is using more CPU than you'd like, you should probably decrease the + * FPS setting above to allow for some idle time in between frames. + */ + override fun run() { + var data: MlImage? + + while (true) { + synchronized(lock) { + while (active && pendingFrame == null) { + try { + // Wait for the next frame to be received from the camera, since we don't have it yet. + lock.wait() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing loop terminated.", e) + return + } + } + + if (!active) { + // Exit the loop once this camera source is stopped or released. We check this here, + // immediately after the wait() above, to handle the case where setActive(false) had + // been called, triggering the termination of this loop.
+ return + } + + // Hold onto the frame locally, so that we can use it for detection below. We need to + // clear pendingFrame so that a new frame can be set as pending while this one is processed. + data = pendingFrame?.let { + MediaMlImageBuilder(it) + .setRotation(pendingFrameRotation) + .build() + } + pendingFrame = null + + } + + try { + synchronized(processorLock) { + data?.let { + if (frameProcessor?.process(it, graphicOverlay) == true) { + // Do nothing: the frame processor accepted the image for processing + // and will close it once detection on it completes. + } else { + // Close the image immediately, because either no frame processor is + // set or it is still busy processing the previous image. + it.close() + } + } + } + } catch (t: Exception) { + Log.e(TAG, "Exception thrown from receiver.", t) + // Precautionary close in case an exception occurred + // while submitting the image to the frame processor + data?.close() + } + } + } + } + + companion object { + + const val CAMERA_FACING_BACK = CameraCharacteristics.LENS_FACING_BACK + const val IMAGE_FORMAT = ImageFormat.YUV_420_888 + + private const val TAG = "Camera2APISource" + + /** Maximum number of images that will be held in the reader's buffer */ + private const val IMAGE_BUFFER_SIZE: Int = 3 + + private const val MIN_CAMERA_PREVIEW_WIDTH = 400 + private const val MAX_CAMERA_PREVIEW_WIDTH = 1300 + private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 640 + private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360 + + private fun getCameraManager(context: Context) = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager + + private fun getCameraId(context: Context): String { + val cameraManager = getCameraManager(context) + cameraManager.cameraIdList.forEach { + val characteristics = cameraManager.getCameraCharacteristics(it) + if (characteristics.get(CameraCharacteristics.LENS_FACING) == CAMERA_FACING_BACK) { + return it + } + } + throw IOException("No camera found matching the back-facing lens $CAMERA_FACING_BACK") + } + + fun getCameraCharacteristics(context: Context) = getCameraManager(context).getCameraCharacteristics(getCameraId(context)) + + /** + * Selects the most suitable preview and picture size, given the display aspect ratio in landscape + * mode. + * + * + * It first tries to pick the one whose aspect ratio is closest to that of the display view, with its + * width in the specified range [MIN_CAMERA_PREVIEW_WIDTH, MAX_CAMERA_PREVIEW_WIDTH]. If there are multiple candidates, it chooses the one with the longest + * width. + * + * + * If the lookup above fails, it chooses the one that has the minimum sum of the differences + * between the desired values and the actual values for width and height. + * + * + * Even though we only need to find the preview size, it's necessary to find both the preview + * size and the picture size of the camera together, because these need to have the same aspect + * ratio. On some hardware, if you only set the preview size, you will get a distorted + * image. + * + * @param cameraSource the camera source to select a preview size from + * @return the selected preview and picture size pair + */ + private fun selectSizePair(cameraSource: CameraSource, displayAspectRatioInLandscape: Float): CameraSizePair? { + val validPreviewSizes = Utils.generateValidPreviewSizeList(cameraSource) + + var selectedPair: CameraSizePair?
= null + // Picks the preview size that has closest aspect ratio to display view. + var minAspectRatioDiff = Float.MAX_VALUE + + for (sizePair in validPreviewSizes) { + val previewSize = sizePair.preview + if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { + continue + } + + val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() + val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) + if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { + if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { + selectedPair = sizePair + } + } else if (aspectRatioDiff < minAspectRatioDiff) { + minAspectRatioDiff = aspectRatioDiff + selectedPair = sizePair + } + } + + if (selectedPair == null) { + // Picks the one that has the minimum sum of the differences between the desired values and + // the actual values for width and height. + var minDiff = Integer.MAX_VALUE + for (sizePair in validPreviewSizes) { + val size = sizePair.preview + val diff = + abs(size.width - DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH) + + abs(size.height - DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT) + if (diff < minDiff) { + selectedPair = sizePair + minDiff = diff + } + } + } + + return selectedPair + } + } +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt new file mode 100644 index 0000000000..518e54025b --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt @@ -0,0 +1,531 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.mlkit.md.camera + +import android.content.Context +import android.graphics.ImageFormat +import android.hardware.Camera +import android.hardware.Camera.CameraInfo +import android.hardware.Camera.Parameters +import android.util.Log +import android.util.Size +import android.view.Surface +import android.view.SurfaceHolder +import android.view.WindowManager +import com.google.mlkit.md.R +import com.google.mlkit.md.Utils +import com.google.mlkit.md.settings.PreferenceUtils +import java.io.IOException +import java.nio.ByteBuffer +import java.util.IdentityHashMap +import kotlin.math.abs +import kotlin.math.ceil + +/** + * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics). This + * receives preview frames from the camera at a specified rate, sends those frames to detector as + * fast as it is able to process. + * + * + * This camera source makes a best effort to manage processing on preview frames as fast as + * possible, while at the same time minimizing lag. As such, frames may be dropped if the detector + * is unable to keep up with the rate of frames generated by the camera. 
*/ +@Suppress("DEPRECATION") +class CameraAPISource(private val graphicOverlay: GraphicOverlay) : CameraSource() { + + private var camera: Camera? = null + private var parameters: Parameters? = null + private var rotationDegrees: Int = 0 + + /** Returns the preview size that is currently in use by the underlying camera. */ + internal var previewSize: Size? = null + private set + + /** + * Dedicated thread and associated runnable for calling into the detector with frames, as the + * frames become available from the camera. + */ + private var processingThread: Thread? = null + private val processingRunnable = FrameProcessingRunnable() + + private val processorLock = Object() + private var frameProcessor: FrameProcessor? = null + + /** + * Map to convert between a byte array, received from the camera, and its associated byte buffer. + * We use byte buffers internally because this is a more efficient way to call into native code + * later (avoids a potential copy). + * + * + * **Note:** uses IdentityHashMap here instead of HashMap because the behavior of an array's + * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces + * identity ('==') check on the keys. + */ + private val bytesToByteBuffer = IdentityHashMap<ByteArray, ByteBuffer>() + private val context: Context = graphicOverlay.context + + private fun updateFlashMode(flashMode: String) { + val parameters = camera?.parameters + parameters?.flashMode = flashMode + camera?.parameters = parameters + } + + /** + * Opens the camera and applies the user settings. + * + * @throws IOException if camera cannot be found or preview cannot be processed. + */ + @Throws(IOException::class) + private fun createCamera(): Camera { + val camera = Camera.open() ?: throw IOException("There is no back-facing camera.") + val parameters = camera.parameters.also { + this.parameters = it + } + setPreviewAndPictureSize(this, parameters) + setRotation(camera, parameters) + + val previewFpsRange = selectPreviewFpsRange(camera) + ?: throw IOException("Could not find suitable preview frames per second range.") + parameters.setPreviewFpsRange( + previewFpsRange[Parameters.PREVIEW_FPS_MIN_INDEX], + previewFpsRange[Parameters.PREVIEW_FPS_MAX_INDEX] + ) + + parameters.previewFormat = IMAGE_FORMAT + + if (parameters.supportedFocusModes.contains(Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { + parameters.focusMode = Parameters.FOCUS_MODE_CONTINUOUS_VIDEO + } else { + Log.i(TAG, "Camera auto focus is not supported on this device.") + } + + camera.parameters = parameters + + camera.setPreviewCallbackWithBuffer(processingRunnable::setNextFrame) + + // Four frame buffers are needed for working with the camera: + // + // one for the frame that is currently being executed upon in doing detection + // one for the next pending frame to process immediately upon completing detection + // two for the frames that the camera uses to populate future preview images + // + // Through trial and error it appears that two free buffers, in addition to the two buffers + // used in this code, are needed for the camera to work properly. Perhaps the camera has one + // thread for acquiring images, and another thread for calling into user code. If only three + // buffers are used, then the camera will spew thousands of warning messages when detection + // takes a non-trivial amount of time.
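+ // Each NV21 preview buffer allocated below holds width * height * 3 / 2 bytes (12 bits per
+ // pixel); see createPreviewBuffer for the exact size calculation.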
+ previewSize?.let { + camera.addCallbackBuffer(createPreviewBuffer(it)) + camera.addCallbackBuffer(createPreviewBuffer(it)) + camera.addCallbackBuffer(createPreviewBuffer(it)) + camera.addCallbackBuffer(createPreviewBuffer(it)) + } + + return camera + } + + @Throws(IOException::class) + private fun setPreviewAndPictureSize(cameraSource: CameraSource, parameters: Parameters) { + + // Gives priority to the preview size specified by the user if one exists. + val sizePair: CameraSizePair = PreferenceUtils.getUserSpecifiedPreviewSize(context) ?: run { + // Camera preview size is based on the landscape mode, so we need to also use the aspect + // ratio of the display in the same mode for comparison. + val displayAspectRatioInLandscape: Float = + if (Utils.isPortraitMode(graphicOverlay.context)) { + graphicOverlay.height.toFloat() / graphicOverlay.width + } else { + graphicOverlay.width.toFloat() / graphicOverlay.height + } + selectSizePair(cameraSource, displayAspectRatioInLandscape) + } ?: throw IOException("Could not find suitable preview size.") + + previewSize = sizePair.preview.also { + Log.v(TAG, "Camera preview size: $it") + parameters.setPreviewSize(it.width, it.height) + PreferenceUtils.saveStringPreference(context, R.string.pref_key_rear_camera_preview_size, it.toString()) + } + + sizePair.picture?.let { pictureSize -> + Log.v(TAG, "Camera picture size: $pictureSize") + parameters.setPictureSize(pictureSize.width, pictureSize.height) + PreferenceUtils.saveStringPreference( + context, R.string.pref_key_rear_camera_picture_size, pictureSize.toString() + ) + } + } + + /** + * Calculates the correct rotation for the given camera id and sets the rotation in the + * parameters. It also sets the camera's display orientation and rotation. + * + * @param parameters the camera parameters for which to set the rotation. + */ + private fun setRotation(camera: Camera, parameters: Parameters) { + val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager + val degrees = when (val deviceRotation = windowManager.defaultDisplay.rotation) { + Surface.ROTATION_0 -> 0 + Surface.ROTATION_90 -> 90 + Surface.ROTATION_180 -> 180 + Surface.ROTATION_270 -> 270 + else -> { + Log.e(TAG, "Bad device rotation value: $deviceRotation") + 0 + } + } + + val cameraInfo = CameraInfo() + Camera.getCameraInfo(CAMERA_FACING_BACK, cameraInfo) + val angle = (cameraInfo.orientation - degrees + 360) % 360 + // This corresponds to the rotation constants in FirebaseVisionImageMetadata. + this.rotationDegrees = angle + camera.setDisplayOrientation(angle) + parameters.setRotation(angle) + } + + /** + * Creates one buffer for the camera preview callback. The size of the buffer is based off of the + * camera preview size and the format of the camera image. + * + * @return a new preview buffer of the appropriate size for the current camera settings. + */ + private fun createPreviewBuffer(previewSize: Size): ByteArray { + val bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT) + val sizeInBits = previewSize.height.toLong() * previewSize.width.toLong() * bitsPerPixel.toLong() + val bufferSize = ceil(sizeInBits / 8.0).toInt() + 1 + + // Creating the byte array this way and wrapping it, as opposed to using .allocate(), + // should guarantee that there will be an array to work with. + val byteArray = ByteArray(bufferSize) + val byteBuffer = ByteBuffer.wrap(byteArray) + check(byteBuffer.hasArray() && byteBuffer.array().contentEquals(byteArray)) { + // This should never happen.
If it does, then we wouldn't be passing the preview content to + // the underlying detector later. + "Failed to create valid buffer for camera source." + } + + bytesToByteBuffer[byteArray] = byteBuffer + return byteArray + } + + // CameraSource overrides + + override fun getSupportedPreviewSizes(): Array<Size> = parameters?.supportedPreviewSizes + ?.map { Size(it.width, it.height) }?.toTypedArray() ?: emptyArray() + + override fun getSupportedPictureSizes(): Array<Size> = parameters?.supportedPictureSizes + ?.map { Size(it.width, it.height) }?.toTypedArray() ?: emptyArray() + + override fun setFrameProcessor(processor: FrameProcessor) { + graphicOverlay.clear() + synchronized(processorLock) { + frameProcessor?.stop() + frameProcessor = processor + } + } + + override fun setFlashStatus(status: Boolean) { + // Torch keeps the flash lit during preview, matching the previous + // updateFlashMode(FLASH_MODE_TORCH) behavior of the activities. + updateFlashMode(if (status) Parameters.FLASH_MODE_TORCH else Parameters.FLASH_MODE_OFF) + } + + override fun getSelectedPreviewSize(): Size? = previewSize + + @Synchronized + override fun start(surfaceHolder: SurfaceHolder) { + if (camera != null) return + + camera = createCamera().apply { + setPreviewDisplay(surfaceHolder) + startPreview() + } + + processingThread = Thread(processingRunnable).apply { + processingRunnable.setActive(true) + start() + } + } + + @Synchronized + override fun stop() { + processingRunnable.setActive(false) + processingThread?.let { + try { + // Waits for the thread to complete to ensure that we can't have multiple threads executing + // at the same time (i.e., which would happen if we called start too quickly after stop). + it.join() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing thread interrupted on stop.") + } + processingThread = null + } + + camera?.let { + it.stopPreview() + it.setPreviewCallbackWithBuffer(null) + try { + it.setPreviewDisplay(null) + } catch (e: Exception) { + Log.e(TAG, "Failed to clear camera preview: $e") + } + it.release() + camera = null + } + + // Release the reference to any image buffers, since these will no longer be in use. + bytesToByteBuffer.clear() + } + + override fun release() { + graphicOverlay.clear() + synchronized(processorLock) { + stop() + frameProcessor?.stop() + } + } + + /** + * This runnable controls access to the underlying receiver, calling it to process frames when + * available from the camera. This is designed to run detection on frames as fast as possible + * (i.e., without unnecessary context switching or waiting on the next frame). + * + * + * While detection is running on a frame, new frames may be received from the camera. As these + * frames come in, the most recent frame is held onto as pending. As soon as detection and its + * associated processing is done for the previous frame, detection on the most recently received + * frame will immediately start on the same thread. + */ + private inner class FrameProcessingRunnable : Runnable { + + // This lock guards all of the member variables below. + private val lock = Object() + private var active = true + + // These pending variables hold the state associated with the new frame awaiting processing. + private var pendingFrameData: ByteBuffer? = null + + /** Marks the runnable as active/not active. Signals any blocked threads to continue. */ + fun setActive(active: Boolean) { + synchronized(lock) { + this.active = active + lock.notifyAll() + } + } + + /** + * Sets the frame data received from the camera.
This adds the previous unused frame buffer (if + * present) back to the camera, and keeps a pending reference to the frame data for future use. + */ + fun setNextFrame(data: ByteArray, camera: Camera) { + synchronized(lock) { + pendingFrameData?.let { + camera.addCallbackBuffer(it.array()) + pendingFrameData = null + } + + if (!bytesToByteBuffer.containsKey(data)) { + Log.d( + TAG, + "Skipping frame. Could not find ByteBuffer associated with the image data from the camera." + ) + return + } + + pendingFrameData = bytesToByteBuffer[data] + + // Notify the processor thread if it is waiting on the next frame (see below). + lock.notifyAll() + } + } + + /** + * As long as the processing thread is active, this executes detection on frames continuously. + * The next pending frame is either immediately available or hasn't been received yet. Once it + * is available, we transfer the frame info to local variables and run detection on that frame. + * It immediately loops back for the next frame without pausing. + * + * + * If detection takes longer than the time in between new frames from the camera, this will + * mean that this loop will run without ever waiting on a frame, avoiding any context switching + * or frame acquisition time latency. + * + * + * If you find that this is using more CPU than you'd like, you should probably decrease the + * FPS setting above to allow for some idle time in between frames. + */ + override fun run() { + var data: ByteBuffer? + + while (true) { + synchronized(lock) { + while (active && pendingFrameData == null) { + try { + // Wait for the next frame to be received from the camera, since we don't have it yet. + lock.wait() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing loop terminated.", e) + return + } + } + + if (!active) { + // Exit the loop once this camera source is stopped or released. We check this here, + // immediately after the wait() above, to handle the case where setActive(false) had + // been called, triggering the termination of this loop. + return + } + + // Hold onto the frame data locally, so that we can use this for detection + // below. We need to clear pendingFrameData to ensure that this buffer isn't + // recycled back to the camera before we are done using that data. + data = pendingFrameData + pendingFrameData = null + } + + try { + synchronized(processorLock) { + val frameMetadata = FrameMetadata(previewSize!!.width, previewSize!!.height, rotationDegrees) + data?.let { + frameProcessor?.process(it, frameMetadata, graphicOverlay) + } + } + } catch (t: Exception) { + Log.e(TAG, "Exception thrown from receiver.", t) + } finally { + data?.let { + camera?.addCallbackBuffer(it.array()) + } + } + } + } + } + + companion object { + + const val CAMERA_FACING_BACK = CameraInfo.CAMERA_FACING_BACK + + private const val TAG = "CameraAPISource" + + private const val IMAGE_FORMAT = ImageFormat.NV21 + private const val MIN_CAMERA_PREVIEW_WIDTH = 400 + private const val MAX_CAMERA_PREVIEW_WIDTH = 1300 + private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 640 + private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360 + private const val REQUESTED_CAMERA_FPS = 30.0f + + /** + * Selects the most suitable preview and picture size, given the display aspect ratio in landscape + * mode. + * + * + * It first tries to pick the one whose aspect ratio is closest to that of the display view, with its + * width in the specified range [MIN_CAMERA_PREVIEW_WIDTH, MAX_CAMERA_PREVIEW_WIDTH]. If there are multiple candidates, it chooses the one with the longest + * width. + * + * + * If the lookup above fails, it chooses the one that has the minimum sum of the differences + * between the desired values and the actual values for width and height. + * + * + * Even though we only need to find the preview size, it's necessary to find both the preview + * size and the picture size of the camera together, because these need to have the same aspect + * ratio. On some hardware, if you only set the preview size, you will get a distorted + * image. + * + * @param cameraSource the camera source to select a preview size from + * @return the selected preview and picture size pair + */ + private fun selectSizePair(cameraSource: CameraSource, displayAspectRatioInLandscape: Float): CameraSizePair? { + val validPreviewSizes = Utils.generateValidPreviewSizeList(cameraSource) + + var selectedPair: CameraSizePair? = null + // Picks the preview size that has closest aspect ratio to display view. + var minAspectRatioDiff = Float.MAX_VALUE + + for (sizePair in validPreviewSizes) { + val previewSize = sizePair.preview + if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { + continue + } + + val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() + val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) + if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { + if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { + selectedPair = sizePair + } + } else if (aspectRatioDiff < minAspectRatioDiff) { + minAspectRatioDiff = aspectRatioDiff + selectedPair = sizePair + } + } + + if (selectedPair == null) { + // Picks the one that has the minimum sum of the differences between the desired values and + // the actual values for width and height. + var minDiff = Integer.MAX_VALUE + for (sizePair in validPreviewSizes) { + val size = sizePair.preview + val diff = + abs(size.width - DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH) + + abs(size.height - DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT) + if (diff < minDiff) { + selectedPair = sizePair + minDiff = diff + } + } + } + + return selectedPair + } + + /** + * Selects the most suitable preview frames per second range. + * + * @param camera the camera to select a frames per second range from + * @return the selected preview frames per second range + */ + private fun selectPreviewFpsRange(camera: Camera): IntArray? { + // The camera API uses integers scaled by a factor of 1000 instead of floating-point frame + // rates. + val desiredPreviewFpsScaled = (REQUESTED_CAMERA_FPS * 1000f).toInt() + + // The method for selecting the best range is to minimize the sum of the differences between + // the desired value and the upper and lower bounds of the range. This may select a range + // that the desired value is outside of, but this is often preferred. For example, if the + // desired frame rate is 29.97, the range (30, 30) is probably more desirable than the + // range (15, 30). + var selectedFpsRange: IntArray?
= null + var minDiff = Integer.MAX_VALUE + for (range in camera.parameters.supportedPreviewFpsRange) { + val deltaMin = desiredPreviewFpsScaled - range[Parameters.PREVIEW_FPS_MIN_INDEX] + val deltaMax = desiredPreviewFpsScaled - range[Parameters.PREVIEW_FPS_MAX_INDEX] + val diff = abs(deltaMin) + abs(deltaMax) + if (diff < minDiff) { + selectedFpsRange = range + minDiff = diff + } + } + return selectedFpsRange + } + } +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt index a3d025296e..21a0d3c4ee 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt @@ -17,7 +17,9 @@ package com.google.mlkit.md.camera import android.hardware.Camera -import com.google.android.gms.common.images.Size +import android.os.Parcelable +import android.util.Size +import kotlinx.android.parcel.Parcelize /** * Stores a preview size and a corresponding same-aspect-ratio picture size. To avoid distorted @@ -25,17 +27,11 @@ import com.google.android.gms.common.images.Size * ratio as the preview size or the preview may end up being distorted. If the picture size is null, * then there is no picture size with the same aspect ratio as the preview size. */ -class CameraSizePair { - val preview: Size - val picture: Size? +@Parcelize +data class CameraSizePair(val preview: Size, val picture: Size?): Parcelable { - constructor(previewSize: Camera.Size, pictureSize: Camera.Size?) { - preview = Size(previewSize.width, previewSize.height) - picture = pictureSize?.let { Size(it.width, it.height) } + constructor(previewSize: Camera.Size, pictureSize: Camera.Size?) : this(Size(previewSize.width, previewSize.height), + pictureSize?.let { Size(it.width, it.height) }) { } - constructor(previewSize: Size, pictureSize: Size?) { - preview = previewSize - picture = pictureSize - } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt index 1050b7a27a..a427639919 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt @@ -16,90 +16,45 @@ package com.google.mlkit.md.camera -import android.content.Context -import android.graphics.ImageFormat -import android.hardware.Camera -import android.hardware.Camera.CameraInfo -import android.hardware.Camera.Parameters -import android.util.Log -import android.view.Surface +import android.util.Size import android.view.SurfaceHolder -import android.view.WindowManager -import com.google.android.gms.common.images.Size -import com.google.mlkit.md.R -import com.google.mlkit.md.Utils -import com.google.mlkit.md.settings.PreferenceUtils -import java.io.IOException -import java.nio.ByteBuffer -import java.util.IdentityHashMap -import kotlin.math.abs -import kotlin.math.ceil -/** - * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics). This - * receives preview frames from the camera at a specified rate, sends those frames to detector as - * fast as it is able to process. - * - * - * This camera source makes a best effort to manage processing on preview frames as fast as - * possible, while at the same time minimizing lag. 
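The range selection above minimizes the summed distance from the desired rate (scaled by 1000) to both ends of each supported range. A small illustration of the same cost function over made-up supported ranges:

// Illustration only; the supported ranges here are hypothetical. Requires kotlin.math.abs.
val desired = (30.0f * 1000f).toInt()  // 30000
val ranges = listOf(intArrayOf(7000, 30000), intArrayOf(15000, 30000), intArrayOf(30000, 30000))
val best = ranges.minByOrNull { abs(desired - it[0]) + abs(desired - it[1]) }
// Costs are 23000, 15000 and 0 respectively, so best is the fixed (30, 30) range,
// preferred over the wider (15, 30) range exactly as the comment above describes.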
As such, frames may be dropped if the detector
- * is unable to keep up with the rate of frames generated by the camera.
- */
-@Suppress("DEPRECATION")
-class CameraSource(private val graphicOverlay: GraphicOverlay) {
+abstract class CameraSource {

-    private var camera: Camera? = null
-    private var rotationDegrees: Int = 0
+    /**
+     * Returns an array of the preview [Size]s supported by the camera.
+     */
+    abstract fun getSupportedPreviewSizes(): Array<Size>

-    /** Returns the preview size that is currently in use by the underlying camera. */
-    internal var previewSize: Size? = null
-        private set
+    /**
+     * Returns an array of the picture [Size]s supported by the camera.
+     */
+    abstract fun getSupportedPictureSizes(): Array<Size>

     /**
-     * Dedicated thread and associated runnable for calling into the detector with frames, as the
-     * frames become available from the camera.
+     * Sets the [FrameProcessor] instance used to process the frames returned by the camera.
      */
-    private var processingThread: Thread? = null
-    private val processingRunnable = FrameProcessingRunnable()
+    abstract fun setFrameProcessor(processor: FrameProcessor)

-    private val processorLock = Object()
-    private var frameProcessor: FrameProcessor? = null
+    /**
+     * Sets the flash status: true turns the flash on, false turns it off.
+     */
+    abstract fun setFlashStatus(status: Boolean)

     /**
-     * Map to convert between a byte array, received from the camera, and its associated byte buffer.
-     * We use byte buffers internally because this is a more efficient way to call into native code
-     * later (avoids a potential copy).
-     *
-     *
-     * **Note:** uses IdentityHashMap here instead of HashMap because the behavior of an array's
-     * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces
-     * identity ('==') check on the keys.
+     * Returns the preview [Size] currently selected by the camera.
      */
-    private val bytesToByteBuffer = IdentityHashMap<ByteArray, ByteBuffer>()
-    private val context: Context = graphicOverlay.context
+    internal abstract fun getSelectedPreviewSize(): Size?

     /**
      * Opens the camera and starts sending preview frames to the underlying detector. The supplied
      * surface holder is used for the preview so frames can be displayed to the user.
      *
      * @param surfaceHolder the surface holder to use for the preview frames.
-     * @throws IOException if the supplied surface holder could not be used as the preview display.
+     * @throws Exception if the supplied surface holder could not be used as the preview display.
      */
-    @Synchronized
-    @Throws(IOException::class)
-    internal fun start(surfaceHolder: SurfaceHolder) {
-        if (camera != null) return
-
-        camera = createCamera().apply {
-            setPreviewDisplay(surfaceHolder)
-            startPreview()
-        }
-
-        processingThread = Thread(processingRunnable).apply {
-            processingRunnable.setActive(true)
-            start()
-        }
-    }
+    @Throws(Exception::class)
+    internal abstract fun start(surfaceHolder: SurfaceHolder)

     /**
      * Closes the camera and stops sending frames to the underlying frame detector.
@@ -111,417 +66,12 @@ class CameraSource(private val graphicOverlay: GraphicOverlay) {
      * Call [.release] instead to completely shut down this camera source and release the
      * resources of the underlying detector.
      */
-    @Synchronized
-    internal fun stop() {
-        processingRunnable.setActive(false)
-        processingThread?.let {
-            try {
-                // Waits for the thread to complete to ensure that we can't have multiple threads executing
-                // at the same time (i.e., which would happen if we called start too quickly after stop).
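Taken together, start, stop and release define the lifecycle contract that CameraSourcePreview drives. A minimal sketch of the expected call order from a host activity; the field and view names here are assumptions, not part of this change:

// Sketch of a host activity holding a CameraSourcePreview (preview)
// and a GraphicOverlay (graphicOverlay) inflated from its layout.
private var cameraSource: CameraSource? = null

override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    cameraSource = CameraSourceFactory.createCameraSource(graphicOverlay)
}

override fun onResume() {
    super.onResume()
    cameraSource?.let { preview.start(it) }  // start() may throw; callers should handle it
}

override fun onPause() {
    super.onPause()
    preview.stop()  // stops frame delivery but keeps the source reusable
}

override fun onDestroy() {
    super.onDestroy()
    cameraSource?.release()  // shuts down the camera and the underlying detector
}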
- it.join() - } catch (e: InterruptedException) { - Log.e(TAG, "Frame processing thread interrupted on stop.") - } - processingThread = null - } - - camera?.let { - it.stopPreview() - it.setPreviewCallbackWithBuffer(null) - try { - it.setPreviewDisplay(null) - } catch (e: Exception) { - Log.e(TAG, "Failed to clear camera preview: $e") - } - it.release() - camera = null - } - - // Release the reference to any image buffers, since these will no longer be in use. - bytesToByteBuffer.clear() - } - - /** Stops the camera and releases the resources of the camera and underlying detector. */ - fun release() { - graphicOverlay.clear() - synchronized(processorLock) { - stop() - frameProcessor?.stop() - } - } - - fun setFrameProcessor(processor: FrameProcessor) { - graphicOverlay.clear() - synchronized(processorLock) { - frameProcessor?.stop() - frameProcessor = processor - } - } - - fun updateFlashMode(flashMode: String) { - val parameters = camera?.parameters - parameters?.flashMode = flashMode - camera?.parameters = parameters - } + @Throws(Exception::class) + internal abstract fun stop() /** - * Opens the camera and applies the user settings. - * - * @throws IOException if camera cannot be found or preview cannot be processed. + * Stops the camera and releases the resources of the camera and underlying detector. */ - @Throws(IOException::class) - private fun createCamera(): Camera { - val camera = Camera.open() ?: throw IOException("There is no back-facing camera.") - val parameters = camera.parameters - setPreviewAndPictureSize(camera, parameters) - setRotation(camera, parameters) - - val previewFpsRange = selectPreviewFpsRange(camera) - ?: throw IOException("Could not find suitable preview frames per second range.") - parameters.setPreviewFpsRange( - previewFpsRange[Parameters.PREVIEW_FPS_MIN_INDEX], - previewFpsRange[Parameters.PREVIEW_FPS_MAX_INDEX] - ) - - parameters.previewFormat = IMAGE_FORMAT - - if (parameters.supportedFocusModes.contains(Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { - parameters.focusMode = Parameters.FOCUS_MODE_CONTINUOUS_VIDEO - } else { - Log.i(TAG, "Camera auto focus is not supported on this device.") - } - - camera.parameters = parameters - - camera.setPreviewCallbackWithBuffer(processingRunnable::setNextFrame) - - // Four frame buffers are needed for working with the camera: - // - // one for the frame that is currently being executed upon in doing detection - // one for the next pending frame to process immediately upon completing detection - // two for the frames that the camera uses to populate future preview images - // - // Through trial and error it appears that two free buffers, in addition to the two buffers - // used in this code, are needed for the camera to work properly. Perhaps the camera has one - // thread for acquiring images, and another thread for calling into user code. If only three - // buffers are used, then the camera will spew thousands of warning messages when detection - // takes a non-trivial amount of time. - previewSize?.let { - camera.addCallbackBuffer(createPreviewBuffer(it)) - camera.addCallbackBuffer(createPreviewBuffer(it)) - camera.addCallbackBuffer(createPreviewBuffer(it)) - camera.addCallbackBuffer(createPreviewBuffer(it)) - } - - return camera - } - - @Throws(IOException::class) - private fun setPreviewAndPictureSize(camera: Camera, parameters: Parameters) { - - // Gives priority to the preview size specified by the user if exists. 
- val sizePair: CameraSizePair = PreferenceUtils.getUserSpecifiedPreviewSize(context) ?: run { - // Camera preview size is based on the landscape mode, so we need to also use the aspect - // ration of display in the same mode for comparison. - val displayAspectRatioInLandscape: Float = - if (Utils.isPortraitMode(graphicOverlay.context)) { - graphicOverlay.height.toFloat() / graphicOverlay.width - } else { - graphicOverlay.width.toFloat() / graphicOverlay.height - } - selectSizePair(camera, displayAspectRatioInLandscape) - } ?: throw IOException("Could not find suitable preview size.") - - previewSize = sizePair.preview.also { - Log.v(TAG, "Camera preview size: $it") - parameters.setPreviewSize(it.width, it.height) - PreferenceUtils.saveStringPreference(context, R.string.pref_key_rear_camera_preview_size, it.toString()) - } - - sizePair.picture?.let { pictureSize -> - Log.v(TAG, "Camera picture size: $pictureSize") - parameters.setPictureSize(pictureSize.width, pictureSize.height) - PreferenceUtils.saveStringPreference( - context, R.string.pref_key_rear_camera_picture_size, pictureSize.toString() - ) - } - } - - /** - * Calculates the correct rotation for the given camera id and sets the rotation in the - * parameters. It also sets the camera's display orientation and rotation. - * - * @param parameters the camera parameters for which to set the rotation. - */ - private fun setRotation(camera: Camera, parameters: Parameters) { - val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager - val degrees = when (val deviceRotation = windowManager.defaultDisplay.rotation) { - Surface.ROTATION_0 -> 0 - Surface.ROTATION_90 -> 90 - Surface.ROTATION_180 -> 180 - Surface.ROTATION_270 -> 270 - else -> { - Log.e(TAG, "Bad device rotation value: $deviceRotation") - 0 - } - } - - val cameraInfo = CameraInfo() - Camera.getCameraInfo(CAMERA_FACING_BACK, cameraInfo) - val angle = (cameraInfo.orientation - degrees + 360) % 360 - this.rotationDegrees = angle - camera.setDisplayOrientation(angle) - parameters.setRotation(angle) - } - - /** - * Creates one buffer for the camera preview callback. The size of the buffer is based off of the - * camera preview size and the format of the camera image. - * - * @return a new preview buffer of the appropriate size for the current camera settings. - */ - private fun createPreviewBuffer(previewSize: Size): ByteArray { - val bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT) - val sizeInBits = previewSize.height.toLong() * previewSize.width.toLong() * bitsPerPixel.toLong() - val bufferSize = ceil(sizeInBits / 8.0).toInt() + 1 - - // Creating the byte array this way and wrapping it, as opposed to using .allocate(), - // should guarantee that there will be an array to work with. - val byteArray = ByteArray(bufferSize) - val byteBuffer = ByteBuffer.wrap(byteArray) - check(!(!byteBuffer.hasArray() || !byteBuffer.array()!!.contentEquals(byteArray))) { - // This should never happen. If it does, then we wouldn't be passing the preview content to - // the underlying detector later. - "Failed to create valid buffer for camera source." - } - - bytesToByteBuffer[byteArray] = byteBuffer - return byteArray - } - - /** - * This runnable controls access to the underlying receiver, calling it to process frames when - * available from the camera. This is designed to run detection on frames as fast as possible - * (i.e., without unnecessary context switching or waiting on the next frame). 
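The buffer sizing in createPreviewBuffer follows directly from the pixel format: ImageFormat.getBitsPerPixel(ImageFormat.NV21) is 12, so each callback buffer holds width * height * 12 / 8 bytes, rounded up, plus one guard byte. Worked through for a 640x360 preview:

// Worked example of the createPreviewBuffer() arithmetic; requires kotlin.math.ceil.
val bitsPerPixel = 12  // ImageFormat.getBitsPerPixel(ImageFormat.NV21)
val bufferSize = ceil(640L * 360L * bitsPerPixel / 8.0).toInt() + 1  // 345601 bytes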
- * - * - * While detection is running on a frame, new frames may be received from the camera. As these - * frames come in, the most recent frame is held onto as pending. As soon as detection and its - * associated processing is done for the previous frame, detection on the mostly recently received - * frame will immediately start on the same thread. - */ - private inner class FrameProcessingRunnable internal constructor() : Runnable { - - // This lock guards all of the member variables below. - private val lock = Object() - private var active = true - - // These pending variables hold the state associated with the new frame awaiting processing. - private var pendingFrameData: ByteBuffer? = null - - /** Marks the runnable as active/not active. Signals any blocked threads to continue. */ - internal fun setActive(active: Boolean) { - synchronized(lock) { - this.active = active - lock.notifyAll() - } - } - - /** - * Sets the frame data received from the camera. This adds the previous unused frame buffer (if - * present) back to the camera, and keeps a pending reference to the frame data for future use. - */ - internal fun setNextFrame(data: ByteArray, camera: Camera) { - synchronized(lock) { - pendingFrameData?.let { - camera.addCallbackBuffer(it.array()) - pendingFrameData = null - } - - if (!bytesToByteBuffer.containsKey(data)) { - Log.d( - TAG, - "Skipping frame. Could not find ByteBuffer associated with the image data from the camera." - ) - return - } - - pendingFrameData = bytesToByteBuffer[data] - - // Notify the processor thread if it is waiting on the next frame (see below). - lock.notifyAll() - } - } - - /** - * As long as the processing thread is active, this executes detection on frames continuously. - * The next pending frame is either immediately available or hasn't been received yet. Once it - * is available, we transfer the frame info to local variables and run detection on that frame. - * It immediately loops back for the next frame without pausing. - * - * - * If detection takes longer than the time in between new frames from the camera, this will - * mean that this loop will run without ever waiting on a frame, avoiding any context switching - * or frame acquisition time latency. - * - * - * If you find that this is using more CPU than you'd like, you should probably decrease the - * FPS setting above to allow for some idle time in between frames. - */ - override fun run() { - var data: ByteBuffer? - - while (true) { - synchronized(lock) { - while (active && pendingFrameData == null) { - try { - // Wait for the next frame to be received from the camera, since we don't have it yet. - lock.wait() - } catch (e: InterruptedException) { - Log.e(TAG, "Frame processing loop terminated.", e) - return - } - } - - if (!active) { - // Exit the loop once this camera source is stopped or released. We check this here, - // immediately after the wait() above, to handle the case where setActive(false) had - // been called, triggering the termination of this loop. - return - } - - // Hold onto the frame data locally, so that we can use this for detection - // below. We need to clear pendingFrameData to ensure that this buffer isn't - // recycled back to the camera before we are done using that data. 
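The hand-off described above is effectively a conflating queue of depth one: a stale pending frame goes straight back to the camera instead of being queued, so detection always starts on the freshest data. Condensed to its core, with illustrative names that are not part of this change:

// Illustrative conflation idiom only; requires java.nio.ByteBuffer.
private val lock = Object()
private var pending: ByteBuffer? = null

fun offer(frame: ByteBuffer, recycle: (ByteBuffer) -> Unit) = synchronized(lock) {
    pending?.let(recycle)  // recycle the stale frame rather than queueing it
    pending = frame
    lock.notifyAll()       // wake the processing thread if it is blocked in take()
}

fun take(): ByteBuffer = synchronized(lock) {
    while (pending == null) lock.wait()
    pending!!.also { pending = null }
}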
- data = pendingFrameData - pendingFrameData = null - } - - try { - synchronized(processorLock) { - val frameMetadata = FrameMetadata(previewSize!!.width, previewSize!!.height, rotationDegrees) - data?.let { - frameProcessor?.process(it, frameMetadata, graphicOverlay) - } - } - } catch (t: Exception) { - Log.e(TAG, "Exception thrown from receiver.", t) - } finally { - data?.let { - camera?.addCallbackBuffer(it.array()) - } - } - } - } - } - - companion object { - - const val CAMERA_FACING_BACK = CameraInfo.CAMERA_FACING_BACK - - private const val TAG = "CameraSource" - - private const val IMAGE_FORMAT = ImageFormat.NV21 - private const val MIN_CAMERA_PREVIEW_WIDTH = 400 - private const val MAX_CAMERA_PREVIEW_WIDTH = 1300 - private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 640 - private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360 - private const val REQUESTED_CAMERA_FPS = 30.0f - - /** - * Selects the most suitable preview and picture size, given the display aspect ratio in landscape - * mode. - * - * - * It's firstly trying to pick the one that has closest aspect ratio to display view with its - * width be in the specified range [[.MIN_CAMERA_PREVIEW_WIDTH], [ ][.MAX_CAMERA_PREVIEW_WIDTH]]. If there're multiple candidates, choose the one having longest - * width. - * - * - * If the above looking up failed, chooses the one that has the minimum sum of the differences - * between the desired values and the actual values for width and height. - * - * - * Even though we only need to find the preview size, it's necessary to find both the preview - * size and the picture size of the camera together, because these need to have the same aspect - * ratio. On some hardware, if you would only set the preview size, you will get a distorted - * image. - * - * @param camera the camera to select a preview size from - * @return the selected preview and picture size pair - */ - private fun selectSizePair(camera: Camera, displayAspectRatioInLandscape: Float): CameraSizePair? { - val validPreviewSizes = Utils.generateValidPreviewSizeList(camera) - - var selectedPair: CameraSizePair? = null - // Picks the preview size that has closest aspect ratio to display view. - var minAspectRatioDiff = Float.MAX_VALUE - - for (sizePair in validPreviewSizes) { - val previewSize = sizePair.preview - if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { - continue - } - - val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() - val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) - if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { - if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { - selectedPair = sizePair - } - } else if (aspectRatioDiff < minAspectRatioDiff) { - minAspectRatioDiff = aspectRatioDiff - selectedPair = sizePair - } - } - - if (selectedPair == null) { - // Picks the one that has the minimum sum of the differences between the desired values and - // the actual values for width and height. - var minDiff = Integer.MAX_VALUE - for (sizePair in validPreviewSizes) { - val size = sizePair.preview - val diff = - abs(size.width - DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH) + - abs(size.height - DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT) - if (diff < minDiff) { - selectedPair = sizePair - minDiff = diff - } - } - } - - return selectedPair - } - - /** - * Selects the most suitable preview frames per second range. 
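To make the aspect-ratio pass above concrete: on a 1920x1080 display the target landscape ratio is about 1.778, so a 1280x720 candidate is essentially an exact match, a 960x540 candidate ties on ratio but loses the tie-break on width, and a 1024x768 candidate (ratio 1.333) can only win through the minimum-difference fallback:

// Worked numbers for a 16:9 display; requires kotlin.math.abs.
val target = 1920f / 1080f                     // ~1.7778
val diff1280x720 = abs(target - 1280f / 720f)  // ~0.0003 -> selected
val diff960x540 = abs(target - 960f / 540f)    // same ratio, smaller width -> loses tie-break
val diff1024x768 = abs(target - 1024f / 768f)  // ~0.444  -> fallback pass only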
- * - * @param camera the camera to select a frames per second range from - * @return the selected preview frames per second range - */ - private fun selectPreviewFpsRange(camera: Camera): IntArray? { - // The camera API uses integers scaled by a factor of 1000 instead of floating-point frame - // rates. - val desiredPreviewFpsScaled = (REQUESTED_CAMERA_FPS * 1000f).toInt() + abstract fun release() - // The method for selecting the best range is to minimize the sum of the differences between - // the desired value and the upper and lower bounds of the range. This may select a range - // that the desired value is outside of, but this is often preferred. For example, if the - // desired frame rate is 29.97, the range (30, 30) is probably more desirable than the - // range (15, 30). - var selectedFpsRange: IntArray? = null - var minDiff = Integer.MAX_VALUE - for (range in camera.parameters.supportedPreviewFpsRange) { - val deltaMin = desiredPreviewFpsScaled - range[Parameters.PREVIEW_FPS_MIN_INDEX] - val deltaMax = desiredPreviewFpsScaled - range[Parameters.PREVIEW_FPS_MAX_INDEX] - val diff = abs(deltaMin) + abs(deltaMax) - if (diff < minDiff) { - selectedFpsRange = range - minDiff = diff - } - } - return selectedFpsRange - } - } -} +} \ No newline at end of file diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt new file mode 100644 index 0000000000..57bb69392e --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt @@ -0,0 +1,39 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.mlkit.md.camera + +import android.hardware.camera2.CameraCharacteristics +import android.hardware.camera2.CameraMetadata +import android.util.Log + +object CameraSourceFactory { + + const val TAG = "CameraSourceFactory" + + fun createCameraSource(graphicOverlay: GraphicOverlay): CameraSource { + val characteristics = Camera2APISource.getCameraCharacteristics(graphicOverlay.context) + val halSupport = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) + return if (halSupport == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY){ + Log.d(TAG, "Camera API source used") + CameraAPISource(graphicOverlay) + } else { + Log.d(TAG, "Camera2 API source used") + Camera2APISource(graphicOverlay) + } + } + +} \ No newline at end of file diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt index 3694222f4f..1157d3259e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt @@ -19,13 +19,16 @@ package com.google.mlkit.md.camera import android.content.Context import android.util.AttributeSet import android.util.Log +import android.util.Size import android.view.SurfaceHolder import android.view.SurfaceView import android.widget.FrameLayout -import com.google.android.gms.common.images.Size +import androidx.annotation.MainThread import com.google.mlkit.md.R import com.google.mlkit.md.Utils import java.io.IOException +import kotlin.math.abs +import kotlin.math.roundToInt /** Preview the camera image in the screen. */ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) { @@ -45,13 +48,16 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay) } - @Throws(IOException::class) + @MainThread + @Throws(Exception::class) fun start(cameraSource: CameraSource) { this.cameraSource = cameraSource startRequested = true startIfReady() } + @MainThread + @Throws(Exception::class) fun stop() { cameraSource?.let { it.stop() @@ -60,16 +66,17 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c } } - @Throws(IOException::class) + @Throws(Exception::class) private fun startIfReady() { if (startRequested && surfaceAvailable) { - cameraSource?.start(surfaceView.holder) - requestLayout() - graphicOverlay?.let { overlay -> - cameraSource?.let { - overlay.setCameraInfo(it) + Log.d(TAG, "Starting camera") + cameraSource?.apply { + start(surfaceView.holder) + requestLayout() + graphicOverlay?.let { + it.setCameraInfo(this) + it.clear() } - overlay.clear() } startRequested = false } @@ -79,7 +86,7 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c val layoutWidth = right - left val layoutHeight = bottom - top - cameraSource?.previewSize?.let { cameraPreviewSize = it } + cameraSource?.getSelectedPreviewSize()?.let { cameraPreviewSize = it } val previewSizeRatio = cameraPreviewSize?.let { size -> if (Utils.isPortraitMode(context)) { @@ -88,38 +95,37 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c } else { size.width.toFloat() / size.height } - } ?: layoutWidth.toFloat() / layoutHeight.toFloat() - - // Match the width of the child view to its parent. 
- val childHeight = (layoutWidth / previewSizeRatio).toInt() - if (childHeight <= layoutHeight) { - for (i in 0 until childCount) { - getChildAt(i).layout(0, 0, layoutWidth, childHeight) - } + } ?: (layoutWidth.toFloat() / layoutHeight.toFloat()) + + //Calculate the new surface view size by scaling the layout width/height based on aspect ratio + val newLayoutWidth: Int + val newLayoutHeight: Int + if (width < height * previewSizeRatio) { + newLayoutHeight = height + newLayoutWidth = (height * previewSizeRatio).roundToInt() } else { - // When the child view is too tall to be fitted in its parent: If the child view is - // static overlay view container (contains views such as bottom prompt chip), we apply - // the size of the parent view to it. Otherwise, we offset the top/bottom position - // equally to position it in the center of the parent. - val excessLenInHalf = (childHeight - layoutHeight) / 2 - for (i in 0 until childCount) { - val childView = getChildAt(i) - when (childView.id) { - R.id.static_overlay_container -> { - childView.layout(0, 0, layoutWidth, layoutHeight) - } - else -> { - childView.layout( - 0, -excessLenInHalf, layoutWidth, layoutHeight + excessLenInHalf - ) - } - } + newLayoutWidth = width + newLayoutHeight = (width / previewSizeRatio).roundToInt() + } + + //Apply the new width & height to surface view only in a way that it should center crop the camera preview + val excessWidthInHalf = abs(newLayoutWidth - layoutWidth) / 2 + val excessHeightInHalf = abs(newLayoutHeight - layoutHeight) / 2 + surfaceView.layout( + -excessWidthInHalf, -excessHeightInHalf, newLayoutWidth - excessWidthInHalf, newLayoutHeight - excessHeightInHalf + ) + + //Apply the actual layout width & height to rest of its child views + for (i in 0 until childCount) { + val childView = getChildAt(i) + if (!childView.equals(surfaceView)){ + childView.layout(0, 0, layoutWidth, layoutHeight) } } try { startIfReady() - } catch (e: IOException) { + } catch (e: Exception) { Log.e(TAG, "Could not start camera source.", e) } } @@ -129,7 +135,7 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c surfaceAvailable = true try { startIfReady() - } catch (e: IOException) { + } catch (e: Exception) { Log.e(TAG, "Could not start camera source.", e) } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessor.kt index 9f8143d6ff..8e83c3757e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessor.kt @@ -16,14 +16,20 @@ package com.google.mlkit.md.camera +import com.google.android.odml.image.MlImage import java.nio.ByteBuffer /** An interface to process the input camera frame and perform detection on it. */ interface FrameProcessor { /** Processes the input frame with the underlying detector. */ + @Deprecated("Keeping it only to support Camera API frame processing") fun process(data: ByteBuffer, frameMetadata: FrameMetadata, graphicOverlay: GraphicOverlay) + /** Processes the input frame with the underlying detector. + * @return true if holding [MlImage] for processing otherwise return false */ + fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean + /** Stops the underlying detector and release resources. 
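The rewritten onLayout center-crops instead of letterboxing. Worked through for a 1080x1920 portrait layout with a 640x480 preview: previewSizeRatio = 480f / 640f = 0.75; width (1080) is less than height * ratio (1440), so newLayoutWidth = 1440 and newLayoutHeight = 1920; excessWidthInHalf = (1440 - 1080) / 2 = 180, giving surfaceView.layout(-180, 0, 1260, 1920). The preview loses 180 px on each side while the overlay children keep the true 1080x1920 bounds.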
*/ fun stop() } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt index 4db20238f8..428675d0c8 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt @@ -22,8 +22,9 @@ import androidx.annotation.GuardedBy import com.google.android.gms.tasks.OnFailureListener import com.google.android.gms.tasks.Task import com.google.android.gms.tasks.TaskExecutors -import com.google.mlkit.md.addOnFailureListener -import com.google.mlkit.md.addOnSuccessListener +import com.google.android.odml.image.MediaImageExtractor +import com.google.android.odml.image.MlImage +import com.google.mlkit.md.Camera2InputInfo import com.google.mlkit.md.CameraInputInfo import com.google.mlkit.md.InputInfo import com.google.mlkit.md.ScopedExecutor @@ -46,8 +47,14 @@ abstract class FrameProcessorBase : FrameProcessor { @GuardedBy("this") private var processingFrameMetaData: FrameMetadata? = null + + // To keep the reference of current detection task + @GuardedBy("this") + private var currentDetectionTask: Task? = null + private val executor = ScopedExecutor(TaskExecutors.MAIN_THREAD) + @Deprecated("Keeping it only to support Camera API frame processing") @Synchronized override fun process( data: ByteBuffer, @@ -61,6 +68,7 @@ abstract class FrameProcessorBase : FrameProcessor { } } + @Deprecated("Keeping it only to support Camera API frame processing") @Synchronized private fun processLatestFrame(graphicOverlay: GraphicOverlay) { processingFrame = latestFrame @@ -83,13 +91,46 @@ abstract class FrameProcessorBase : FrameProcessor { this@FrameProcessorBase.onSuccess(CameraInputInfo(frame, frameMetaData), results, graphicOverlay) processLatestFrame(graphicOverlay) } - .addOnFailureListener(executor) { e -> OnFailureListener { this@FrameProcessorBase.onFailure(it) } } + .addOnFailureListener(executor) { e -> this@FrameProcessorBase.onFailure(e) } + } + + @Synchronized + override fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean { + return processLatestFrame(image, graphicOverlay) + } + + @Synchronized + private fun processLatestFrame(frame: MlImage, graphicOverlay: GraphicOverlay): Boolean { + return if(currentDetectionTask?.isComplete == false){ + false + }else { + //val startMs = SystemClock.elapsedRealtime() + currentDetectionTask = detectInImage(frame).addOnCompleteListener(executor) { task -> + if (task.isSuccessful) { + //Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}") + MediaImageExtractor.extract(frame).let { + this@FrameProcessorBase.onSuccess( + Camera2InputInfo(it, frame.rotation), task.result, graphicOverlay + ) + } + } else { + //Log.d(TAG, "Detect In Image Failure: ${e.message}") + this@FrameProcessorBase.onFailure(task.exception) + } + + //Close the processing frame + frame.close() + } + true + } } override fun stop() { executor.shutdown() } + protected abstract fun detectInImage(image: MlImage): Task + @Deprecated("Keeping it only to support Camera API frame processing") protected abstract fun detectInImage(image: InputImage): Task /** Be called when the detection succeeds. */ @@ -99,7 +140,7 @@ abstract class FrameProcessorBase : FrameProcessor { graphicOverlay: GraphicOverlay ) - protected abstract fun onFailure(e: Exception) + protected abstract fun onFailure(e: Exception?) 
companion object { private const val TAG = "FrameProcessorBase" diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt index 4b18a19279..d841545788 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt @@ -39,6 +39,7 @@ import java.util.ArrayList * Associated [Graphic] items should use [.translateX] and [ ][.translateY] to convert to view coordinate from the preview's coordinate. */ class GraphicOverlay(context: Context, attrs: AttributeSet) : View(context, attrs) { + private val lock = Any() private var previewWidth: Int = 0 @@ -79,7 +80,7 @@ class GraphicOverlay(context: Context, attrs: AttributeSet) : View(context, attr * coordinates later. */ fun setCameraInfo(cameraSource: CameraSource) { - val previewSize = cameraSource.previewSize ?: return + val previewSize = cameraSource.getSelectedPreviewSize() ?: return if (Utils.isPortraitMode(context)) { // Swap width and height when in portrait, since camera's natural orientation is landscape. previewWidth = previewSize.height diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/WorkflowModel.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/WorkflowModel.kt index c17a1bc4ce..e51607030e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/WorkflowModel.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/WorkflowModel.kt @@ -21,6 +21,7 @@ import android.content.Context import androidx.annotation.MainThread import androidx.lifecycle.AndroidViewModel import androidx.lifecycle.MutableLiveData +import com.google.mlkit.md.objectdetection.ConfirmedObjectInfo import com.google.mlkit.md.objectdetection.DetectedObjectInfo import com.google.mlkit.md.productsearch.Product import com.google.mlkit.md.productsearch.SearchedObject @@ -32,7 +33,7 @@ import java.util.HashSet class WorkflowModel(application: Application) : AndroidViewModel(application) { val workflowState = MutableLiveData() - val objectToSearch = MutableLiveData() + val objectToSearch = MutableLiveData() val searchedObject = MutableLiveData() val detectedBarcode = MutableLiveData() @@ -41,7 +42,7 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) { var isCameraLive = false private set - private var confirmedObject: DetectedObjectInfo? = null + private var confirmedObject: ConfirmedObjectInfo? 
= null private val context: Context get() = getApplication().applicationContext @@ -74,12 +75,14 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) { fun confirmingObject(confirmingObject: DetectedObjectInfo, progress: Float) { val isConfirmed = progress.compareTo(1f) == 0 if (isConfirmed) { - confirmedObject = confirmingObject - if (PreferenceUtils.isAutoSearchEnabled(context)) { - setWorkflowState(WorkflowState.SEARCHING) - triggerSearch(confirmingObject) - } else { - setWorkflowState(WorkflowState.CONFIRMED) + ConfirmedObjectInfo.from(confirmingObject).also { + confirmedObject = it + if (PreferenceUtils.isAutoSearchEnabled(context)) { + setWorkflowState(WorkflowState.SEARCHING) + triggerSearch(it) + } else { + setWorkflowState(WorkflowState.CONFIRMED) + } } } else { setWorkflowState(WorkflowState.CONFIRMING) @@ -94,15 +97,15 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) { } } - private fun triggerSearch(detectedObject: DetectedObjectInfo) { - val objectId = detectedObject.objectId ?: throw NullPointerException() + private fun triggerSearch(confirmedObject: ConfirmedObjectInfo) { + val objectId = confirmedObject.objectId ?: throw NullPointerException() if (objectIdsToSearch.contains(objectId)) { // Already in searching. return } objectIdsToSearch.add(objectId) - objectToSearch.value = detectedObject + objectToSearch.value = confirmedObject } fun markCameraLive() { @@ -114,14 +117,14 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) { isCameraLive = false } - fun onSearchCompleted(detectedObject: DetectedObjectInfo, products: List) { - val lConfirmedObject = confirmedObject - if (detectedObject != lConfirmedObject) { + fun onSearchCompleted(confirmedObject: ConfirmedObjectInfo, products: List) { + val lConfirmedObject = this@WorkflowModel.confirmedObject + if (confirmedObject != lConfirmedObject) { // Drops the search result from the object that has lost focus. return } - objectIdsToSearch.remove(detectedObject.objectId) + objectIdsToSearch.remove(confirmedObject.objectId) setWorkflowState(WorkflowState.SEARCHED) searchedObject.value = SearchedObject(context.resources, lConfirmedObject, products) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ConfirmedObjectInfo.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ConfirmedObjectInfo.kt new file mode 100644 index 0000000000..a98dd15876 --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ConfirmedObjectInfo.kt @@ -0,0 +1,60 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.mlkit.md.objectdetection + +import android.graphics.Bitmap +import android.graphics.Bitmap.CompressFormat +import android.graphics.Rect +import android.util.Log +import com.google.mlkit.vision.objects.DetectedObject +import java.io.ByteArrayOutputStream +import java.io.IOException + +/** + * Holds the detected object info and its related image info. + */ + +class ConfirmedObjectInfo private constructor(val objectId: Int?, val objectIndex: Int, val boundingBox: Rect, + val labels: List, val bitmap: Bitmap) { + + private var jpegBytes: ByteArray? = null + + val imageData: ByteArray? + @Synchronized get() { + if (jpegBytes == null) { + try { + ByteArrayOutputStream().use { stream -> + bitmap.compress(CompressFormat.JPEG, /* quality= */ 100, stream) + jpegBytes = stream.toByteArray() + } + } catch (e: IOException) { + Log.e(TAG, "Error getting object image data!") + } + } + return jpegBytes + } + + companion object { + private const val TAG = "ConfirmedObject" + + fun from(detectedObjectInfo: DetectedObjectInfo): ConfirmedObjectInfo{ + return ConfirmedObjectInfo(detectedObjectInfo.objectId, detectedObjectInfo.objectIndex, + detectedObjectInfo.boundingBox, detectedObjectInfo.labels, detectedObjectInfo.getBitmap()) + } + } + +} \ No newline at end of file diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/DetectedObjectInfo.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/DetectedObjectInfo.kt index 6ff02ba85a..c15fa28a6f 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/DetectedObjectInfo.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/DetectedObjectInfo.kt @@ -35,26 +35,12 @@ class DetectedObjectInfo( ) { private var bitmap: Bitmap? = null - private var jpegBytes: ByteArray? = null val objectId: Int? = detectedObject.trackingId val boundingBox: Rect = detectedObject.boundingBox val labels: List = detectedObject.labels - val imageData: ByteArray? 
- @Synchronized get() { - if (jpegBytes == null) { - try { - ByteArrayOutputStream().use { stream -> - getBitmap().compress(CompressFormat.JPEG, /* quality= */ 100, stream) - jpegBytes = stream.toByteArray() - } - } catch (e: IOException) { - Log.e(TAG, "Error getting object image data!") - } - } - return jpegBytes - } + @Synchronized fun getBitmap(): Bitmap { @@ -67,11 +53,15 @@ class DetectedObjectInfo( boundingBox.width(), boundingBox.height() ) - if (createdBitmap.width > MAX_IMAGE_WIDTH) { + (if (createdBitmap.width > MAX_IMAGE_WIDTH) { val dstHeight = (MAX_IMAGE_WIDTH.toFloat() / createdBitmap.width * createdBitmap.height).toInt() - bitmap = Bitmap.createScaledBitmap(createdBitmap, MAX_IMAGE_WIDTH, dstHeight, /* filter= */ false) + Bitmap.createScaledBitmap(createdBitmap, MAX_IMAGE_WIDTH, dstHeight, /* filter= */ false) + } + else{ + createdBitmap + }).also { + bitmap = it } - createdBitmap } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/MultiObjectProcessor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/MultiObjectProcessor.kt index d0f7382f52..d1b921ab4e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/MultiObjectProcessor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/MultiObjectProcessor.kt @@ -23,6 +23,7 @@ import androidx.annotation.MainThread import androidx.core.util.forEach import androidx.core.util.set import com.google.android.gms.tasks.Task +import com.google.android.odml.image.MlImage import com.google.mlkit.md.camera.CameraReticleAnimator import com.google.mlkit.md.camera.GraphicOverlay import com.google.mlkit.md.R @@ -31,6 +32,7 @@ import com.google.mlkit.md.camera.FrameProcessorBase import com.google.mlkit.md.settings.PreferenceUtils import com.google.mlkit.common.model.LocalModel import com.google.mlkit.md.InputInfo +import com.google.mlkit.vision.barcode.common.Barcode import com.google.mlkit.vision.common.InputImage import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions import com.google.mlkit.vision.objects.defaults.ObjectDetectorOptions @@ -92,9 +94,10 @@ class MultiObjectProcessor( } } - override fun detectInImage(image: InputImage): Task> { - return detector.process(image) - } + override fun detectInImage(image: MlImage): Task> = detector.process(image) + + @Deprecated("Keeping it only to support Camera API frame processing") + override fun detectInImage(image: InputImage): Task> = detector.process(image) @MainThread override fun onSuccess( @@ -204,7 +207,7 @@ class MultiObjectProcessor( return distance < objectSelectionDistanceThreshold } - override fun onFailure(e: Exception) { + override fun onFailure(e: Exception?) 
{ Log.e(TAG, "Object detection failed!", e) } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ProminentObjectProcessor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ProminentObjectProcessor.kt index 288e51bc44..d2d63de774 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ProminentObjectProcessor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ProminentObjectProcessor.kt @@ -20,6 +20,7 @@ import android.graphics.RectF import android.util.Log import androidx.annotation.MainThread import com.google.android.gms.tasks.Task +import com.google.android.odml.image.MlImage import com.google.mlkit.md.camera.CameraReticleAnimator import com.google.mlkit.md.camera.GraphicOverlay import com.google.mlkit.md.R @@ -85,9 +86,10 @@ class ProminentObjectProcessor( } } - override fun detectInImage(image: InputImage): Task> { - return detector.process(image) - } + override fun detectInImage(image: MlImage): Task> = detector.process(image) + + @Deprecated("Keeping it only to support Camera API frame processing") + override fun detectInImage(image: InputImage): Task> = detector.process(image) @MainThread override fun onSuccess( @@ -176,7 +178,7 @@ class ProminentObjectProcessor( return reticleRect.intersect(boxRect) } - override fun onFailure(e: Exception) { + override fun onFailure(e: Exception?) { Log.e(TAG, "Object detection failed!", e) } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchEngine.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchEngine.kt index d9ba592cd3..dec25b172e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchEngine.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchEngine.kt @@ -22,6 +22,7 @@ import com.android.volley.RequestQueue import com.android.volley.toolbox.JsonObjectRequest import com.android.volley.toolbox.Volley import com.google.android.gms.tasks.Tasks +import com.google.mlkit.md.objectdetection.ConfirmedObjectInfo import com.google.mlkit.md.objectdetection.DetectedObjectInfo import java.util.ArrayList import java.util.concurrent.Callable @@ -35,11 +36,11 @@ class SearchEngine(context: Context) { private val requestCreationExecutor: ExecutorService = Executors.newSingleThreadExecutor() fun search( - detectedObject: DetectedObjectInfo, - listener: (detectedObject: DetectedObjectInfo, productList: List) -> Unit + confirmedObject: ConfirmedObjectInfo, + listener: (confirmedObject: ConfirmedObjectInfo, productList: List) -> Unit ) { // Crops the object image out of the full image is expensive, so do it off the UI thread. 
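This Tasks.call pattern is the Play services bridge for one-off background work: the Callable runs on the supplied executor and, by default, the success and failure listeners are delivered back on the main thread. Condensed, with placeholder names for the request factory and queue:

// Sketch only; buildRequest() and requestQueue are placeholders, not part of this change.
val requestCreationExecutor: ExecutorService = Executors.newSingleThreadExecutor()

Tasks.call(requestCreationExecutor, Callable { buildRequest() })      // off the UI thread
    .addOnSuccessListener { request -> requestQueue.add(request) }    // main thread
    .addOnFailureListener { e -> Log.e(TAG, "Request creation failed!", e) }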
- Tasks.call(requestCreationExecutor, Callable { createRequest(detectedObject) }) + Tasks.call(requestCreationExecutor, Callable { createRequest(confirmedObject) }) .addOnSuccessListener { productRequest -> searchRequestQueue.add(productRequest.setTag(TAG)) } .addOnFailureListener { e -> Log.e(TAG, "Failed to create product search request!", e) @@ -50,7 +51,7 @@ class SearchEngine(context: Context) { Product(/* imageUrl= */"", "Product title $i", "Product subtitle $i") ) } - listener.invoke(detectedObject, productList) + listener.invoke(confirmedObject, productList) } } @@ -63,7 +64,7 @@ class SearchEngine(context: Context) { private const val TAG = "SearchEngine" @Throws(Exception::class) - private fun createRequest(searchingObject: DetectedObjectInfo): JsonObjectRequest { + private fun createRequest(searchingObject: ConfirmedObjectInfo): JsonObjectRequest { val objectImageData = searchingObject.imageData ?: throw Exception("Failed to get object image data!") diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchedObject.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchedObject.kt index 201746025d..2191619ffb 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchedObject.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchedObject.kt @@ -21,12 +21,13 @@ import android.graphics.Bitmap import android.graphics.Rect import com.google.mlkit.md.R import com.google.mlkit.md.Utils +import com.google.mlkit.md.objectdetection.ConfirmedObjectInfo import com.google.mlkit.md.objectdetection.DetectedObjectInfo /** Hosts the detected object info and its search result. */ class SearchedObject( resources: Resources, - private val detectedObject: DetectedObjectInfo, + private val confirmedObject: ConfirmedObjectInfo, val productList: List ) { @@ -34,14 +35,14 @@ class SearchedObject( private var objectThumbnail: Bitmap? 
= null val objectIndex: Int - get() = detectedObject.objectIndex + get() = confirmedObject.objectIndex val boundingBox: Rect - get() = detectedObject.boundingBox + get() = confirmedObject.boundingBox @Synchronized fun getObjectThumbnail(): Bitmap = objectThumbnail ?: let { - Utils.getCornerRoundedBitmap(detectedObject.getBitmap(), objectThumbnailCornerRadius) + Utils.getCornerRoundedBitmap(confirmedObject.bitmap, objectThumbnailCornerRadius) .also { objectThumbnail = it } } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt index 16860cb34b..883c94ea02 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt @@ -18,9 +18,9 @@ package com.google.mlkit.md.settings import android.content.Context import android.graphics.RectF -import android.preference.PreferenceManager +import androidx.preference.PreferenceManager +import android.util.Size import androidx.annotation.StringRes -import com.google.android.gms.common.images.Size import com.google.mlkit.md.R import com.google.mlkit.md.camera.CameraSizePair import com.google.mlkit.md.camera.GraphicOverlay diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsActivity.kt index e51629d765..1892380c32 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsActivity.kt @@ -16,21 +16,28 @@ package com.google.mlkit.md.settings +import android.app.Activity +import android.content.Context +import android.content.Intent import android.os.Bundle +import android.util.Log import androidx.appcompat.app.AppCompatActivity import com.google.mlkit.md.R +import com.google.mlkit.md.Utils +import com.google.mlkit.md.camera.CameraSizePair +import com.google.mlkit.md.camera.CameraSource /** Hosts the preference fragment to configure settings. */ class SettingsActivity : AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) - setContentView(R.layout.activity_settings) + val previewSizeList = intent.getParcelableArrayListExtra(EXTRA_PREVIEW_SIZE_LIST) ?: arrayListOf() supportActionBar?.setDisplayHomeAsUpEnabled(true) supportFragmentManager .beginTransaction() - .replace(R.id.settings_container, SettingsFragment()) + .replace(R.id.settings_container, SettingsFragment.newInstance(previewSizeList)) .commit() } @@ -38,4 +45,15 @@ class SettingsActivity : AppCompatActivity() { onBackPressed() return true } + + companion object { + private const val EXTRA_PREVIEW_SIZE_LIST = "extra_preview_size_list" + + fun newIntent(context: Context, cameraSource: CameraSource?) 
= Intent(context, SettingsActivity::class.java).apply { + cameraSource?.let { + putParcelableArrayListExtra(EXTRA_PREVIEW_SIZE_LIST, ArrayList(Utils.generateValidPreviewSizeList(it))) + } + } + } + } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt index ac4b3d1d38..933e5cc9a9 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt @@ -16,14 +16,11 @@ package com.google.mlkit.md.settings -import android.hardware.Camera import android.os.Bundle import androidx.preference.ListPreference import androidx.preference.PreferenceFragmentCompat -import com.google.mlkit.md.camera.CameraSource import com.google.mlkit.md.R -import com.google.mlkit.md.Utils -import java.util.HashMap +import com.google.mlkit.md.camera.CameraSizePair /** Configures App settings. */ class SettingsFragment : PreferenceFragmentCompat() { @@ -34,14 +31,12 @@ class SettingsFragment : PreferenceFragmentCompat() { } private fun setUpRearCameraPreviewSizePreference() { - val previewSizePreference = - findPreference(getString(R.string.pref_key_rear_camera_preview_size))!! - - var camera: Camera? = null - - try { - camera = Camera.open(CameraSource.CAMERA_FACING_BACK) - val previewSizeList = Utils.generateValidPreviewSizeList(camera!!) + val previewSizePreference = findPreference(getString(R.string.pref_key_rear_camera_preview_size))!! + val previewSizeList = arguments?.getParcelableArrayList(ARG_PREVIEW_SIZE_LIST) ?: arrayListOf() + if (previewSizeList.isEmpty()){ + previewSizePreference.parent?.removePreference(previewSizePreference) + } + else{ val previewSizeStringValues = arrayOfNulls(previewSizeList.size) val previewToPictureSizeStringMap = HashMap() for (i in previewSizeList.indices) { @@ -65,11 +60,16 @@ class SettingsFragment : PreferenceFragmentCompat() { ) true } - } catch (e: Exception) { - // If there's no camera for the given camera id, hide the corresponding preference. - previewSizePreference.parent?.removePreference(previewSizePreference) - } finally { - camera?.release() + } + } + + companion object { + private const val ARG_PREVIEW_SIZE_LIST = "arg_preview_size_list" + + fun newInstance(previewSizeList: ArrayList) = SettingsFragment().apply { + arguments = Bundle().apply { + putParcelableArrayList(ARG_PREVIEW_SIZE_LIST, previewSizeList) + } } } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/ExifUtils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/ExifUtils.kt new file mode 100644 index 0000000000..35b20fc082 --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/ExifUtils.kt @@ -0,0 +1,73 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.mlkit.md.utils + +import android.graphics.Bitmap +import android.graphics.Matrix +import android.util.Log +import androidx.exifinterface.media.ExifInterface + +private const val TAG: String = "ExifUtils" + +/** Transforms rotation and mirroring information into one of the [ExifInterface] constants */ +fun computeExifOrientation(rotationDegrees: Int, mirrored: Boolean) = when { + rotationDegrees == 0 && !mirrored -> ExifInterface.ORIENTATION_NORMAL + rotationDegrees == 0 && mirrored -> ExifInterface.ORIENTATION_FLIP_HORIZONTAL + rotationDegrees == 180 && !mirrored -> ExifInterface.ORIENTATION_ROTATE_180 + rotationDegrees == 180 && mirrored -> ExifInterface.ORIENTATION_FLIP_VERTICAL + rotationDegrees == 90 && !mirrored -> ExifInterface.ORIENTATION_ROTATE_90 + rotationDegrees == 90 && mirrored -> ExifInterface.ORIENTATION_TRANSPOSE + rotationDegrees == 270 && !mirrored -> ExifInterface.ORIENTATION_ROTATE_270 + rotationDegrees == 270 && mirrored -> ExifInterface.ORIENTATION_TRANSVERSE + else -> ExifInterface.ORIENTATION_UNDEFINED +} + +/** + * Helper function used to convert an EXIF orientation enum into a transformation matrix + * that can be applied to a bitmap. + * + * @return matrix - Transformation required to properly display [Bitmap] + */ +fun decodeExifOrientation(exifOrientation: Int): Matrix { + val matrix = Matrix() + + // Apply transformation corresponding to declared EXIF orientation + when (exifOrientation) { + ExifInterface.ORIENTATION_NORMAL -> Unit + ExifInterface.ORIENTATION_UNDEFINED -> Unit + ExifInterface.ORIENTATION_ROTATE_90 -> matrix.postRotate(90F) + ExifInterface.ORIENTATION_ROTATE_180 -> matrix.postRotate(180F) + ExifInterface.ORIENTATION_ROTATE_270 -> matrix.postRotate(270F) + ExifInterface.ORIENTATION_FLIP_HORIZONTAL -> matrix.postScale(-1F, 1F) + ExifInterface.ORIENTATION_FLIP_VERTICAL -> matrix.postScale(1F, -1F) + ExifInterface.ORIENTATION_TRANSPOSE -> { + matrix.postScale(-1F, 1F) + matrix.postRotate(270F) + } + ExifInterface.ORIENTATION_TRANSVERSE -> { + matrix.postScale(-1F, 1F) + matrix.postRotate(90F) + } + + // Error out if the EXIF orientation is invalid + else -> Log.e(TAG, "Invalid orientation: $exifOrientation") + } + + // Return the resulting matrix + return matrix +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/OrientationLiveData.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/OrientationLiveData.kt new file mode 100644 index 0000000000..cd96e842d1 --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/OrientationLiveData.kt @@ -0,0 +1,96 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.google.mlkit.md.utils + +import android.content.Context +import android.hardware.camera2.CameraCharacteristics +import android.view.OrientationEventListener +import android.view.Surface +import androidx.lifecycle.LiveData + + +/** + * Calculates closest 90-degree orientation to compensate for the device + * rotation relative to sensor orientation, i.e., allows user to see camera + * frames with the expected orientation. + */ +class OrientationLiveData( + context: Context, + characteristics: CameraCharacteristics +): LiveData<Int>() { + + private val listener = object : OrientationEventListener(context.applicationContext) { + override fun onOrientationChanged(orientation: Int) { + val rotation = when { + orientation <= 45 -> Surface.ROTATION_0 + orientation <= 135 -> Surface.ROTATION_90 + orientation <= 225 -> Surface.ROTATION_180 + orientation <= 315 -> Surface.ROTATION_270 + else -> Surface.ROTATION_0 + } + val relative = computeRelativeRotation(characteristics, rotation) + if (relative != value) postValue(relative) + } + } + + override fun onActive() { + super.onActive() + listener.enable() + } + + override fun onInactive() { + super.onInactive() + listener.disable() + } + + companion object { + + /** + * Computes rotation required to transform from the camera sensor orientation to the + * device's current orientation in degrees. + * + * @param characteristics the [CameraCharacteristics] to query for the sensor orientation. + * @param surfaceRotation the current device orientation as a Surface constant + * @return the relative rotation from the camera sensor to the current device orientation. + */ + @JvmStatic + private fun computeRelativeRotation( + characteristics: CameraCharacteristics, + surfaceRotation: Int + ): Int { + val sensorOrientationDegrees = + characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!
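+ // Note: SENSOR_ORIENTATION is the clockwise rotation (0, 90, 180 or 270) needed to show the sensor image upright in the device's natural orientation; typically 90 for back cameras and 270 for front cameras.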
+ + val deviceOrientationDegrees = when (surfaceRotation) { + Surface.ROTATION_0 -> 0 + Surface.ROTATION_90 -> 90 + Surface.ROTATION_180 -> 180 + Surface.ROTATION_270 -> 270 + else -> 0 + } + + // Reverse device orientation for front-facing cameras + val sign = if (characteristics.get(CameraCharacteristics.LENS_FACING) == + CameraCharacteristics.LENS_FACING_FRONT) 1 else -1 + + // Calculate desired JPEG orientation relative to camera orientation to make + // the image upright relative to the device orientation + return (sensorOrientationDegrees - (deviceOrientationDegrees * sign) + 360) % 360 + } + } +} diff --git a/android/material-showcase/gradle/wrapper/gradle-wrapper.jar b/android/material-showcase/gradle/wrapper/gradle-wrapper.jar index f6b961fd5a..7454180f2a 100644 Binary files a/android/material-showcase/gradle/wrapper/gradle-wrapper.jar and b/android/material-showcase/gradle/wrapper/gradle-wrapper.jar differ diff --git a/android/material-showcase/gradle/wrapper/gradle-wrapper.properties b/android/material-showcase/gradle/wrapper/gradle-wrapper.properties index 7f975cbe9d..ffed3a254e 100644 --- a/android/material-showcase/gradle/wrapper/gradle-wrapper.properties +++ b/android/material-showcase/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Sun Jan 31 23:07:35 PST 2021 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip diff --git a/android/material-showcase/gradlew b/android/material-showcase/gradlew index cccdd3d517..1b6c787337 100755 --- a/android/material-showcase/gradlew +++ b/android/material-showcase/gradlew @@ -1,78 +1,129 @@ -#!/usr/bin/env sh +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. 
+# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -81,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -89,84 +140,95 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. 
-if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=$((i+1)) + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. 
+ shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=$(save "$@") - -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" - -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' exec "$JAVACMD" "$@" diff --git a/android/material-showcase/gradlew.bat b/android/material-showcase/gradlew.bat index e95643d6a2..ac1b06f938 100644 --- a/android/material-showcase/gradlew.bat +++ b/android/material-showcase/gradlew.bat @@ -1,3 +1,19 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @@ -13,15 +29,18 @@ if "%DIRNAME%" == "" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if "%ERRORLEVEL%" == "0" goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -35,7 +54,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -45,28 +64,14 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell
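Taken together, the two new utility files split the work: OrientationLiveData computes how far each camera frame must be rotated, and ExifUtils converts that rotation into an EXIF tag plus a bitmap transform. Below is a minimal, self-contained sketch of the arithmetic for one common case; the inlined constants and the main() wrapper are illustrative assumptions, not part of the diff:

    // Sketch only: a typical back camera reports SENSOR_ORIENTATION == 90 (assumed).
    fun main() {
        val sensorOrientationDegrees = 90  // CameraCharacteristics.SENSOR_ORIENTATION (assumed value)
        val deviceOrientationDegrees = 0   // device held upright, i.e. Surface.ROTATION_0
        val sign = -1                      // back-facing lens

        // Same formula as OrientationLiveData.computeRelativeRotation:
        val relativeRotation =
            (sensorOrientationDegrees - (deviceOrientationDegrees * sign) + 360) % 360
        println(relativeRotation)          // prints 90: each frame needs a 90-degree clockwise turn

        // computeExifOrientation(relativeRotation, mirrored = false) would then return
        // ExifInterface.ORIENTATION_ROTATE_90, for which decodeExifOrientation(...)
        // produces a Matrix with postRotate(90F) applied.
    }

The example runs off-device because the camera constants are inlined; on-device, the same values would come from the CameraCharacteristics queries shown in the diff above.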