From e07a940d2a25d278c2a51842be159f7a7be747f1 Mon Sep 17 00:00:00 2001 From: Julie Zhou Date: Tue, 2 May 2023 10:56:33 -0700 Subject: [PATCH 01/18] Mirror the commit https://github.com/googlesamples/mlkit/commit/8b31d95f6d1872fc7d57e4371ece4348f2a6cad4 to local branch. --- ios/quickstarts/vision/VisionExample/UIUtilities.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ios/quickstarts/vision/VisionExample/UIUtilities.swift b/ios/quickstarts/vision/VisionExample/UIUtilities.swift index 26089279b4..f53bd747ff 100644 --- a/ios/quickstarts/vision/VisionExample/UIUtilities.swift +++ b/ios/quickstarts/vision/VisionExample/UIUtilities.swift @@ -451,7 +451,7 @@ public class UIUtilities { /// /// - Parameters: /// - fromPoint: The starting point. - /// - endPoint: The end point. + /// - toPoint: The end point. /// - Returns: The distance. private static func distance(fromPoint: Vision3DPoint, toPoint: Vision3DPoint) -> CGFloat { let xDiff = fromPoint.x - toPoint.x From d2cdbcc0a4c91df3abcdf559cdc027ab1a3a352e Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Wed, 19 Jul 2023 06:24:32 +0530 Subject: [PATCH 02/18] Camera to Camera2 API upgrade related changes done. --- .../java/com/google/mlkit/md/InputInfo.kt | 18 + .../main/java/com/google/mlkit/md/Utils.kt | 79 +++ .../google/mlkit/md/camera/Camera2Source.kt | 609 ++++++++++++++++++ .../mlkit/md/camera/Camera2SourcePreview.kt | 148 +++++ .../google/mlkit/md/camera/CameraSizePair.kt | 2 +- .../google/mlkit/md/camera/CameraSource.kt | 2 +- .../mlkit/md/camera/CameraSourcePreview.kt | 2 +- .../google/mlkit/md/camera/Frame2Processor.kt | 30 + .../mlkit/md/camera/Frame2ProcessorBase.kt | 94 +++ .../mlkit/md/settings/PreferenceUtils.kt | 2 +- .../com/google/mlkit/md/utils/ExifUtils.kt | 73 +++ .../mlkit/md/utils/OrientationLiveData.kt | 95 +++ 12 files changed, 1150 insertions(+), 4 deletions(-) create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/ExifUtils.kt create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/OrientationLiveData.kt diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/InputInfo.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/InputInfo.kt index b18378e097..02daa75994 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/InputInfo.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/InputInfo.kt @@ -17,10 +17,12 @@ package com.google.mlkit.md import android.graphics.Bitmap +import android.media.Image import com.google.mlkit.md.camera.FrameMetadata import java.nio.ByteBuffer interface InputInfo { + //TODO: Make it optional fun getBitmap(): Bitmap } @@ -42,6 +44,22 @@ class CameraInputInfo( } } +class Camera2InputInfo( + private val frameImage: Image, + private val frameRotation: Int +) : InputInfo { + + private var bitmap: Bitmap? = null + + @Synchronized + override fun getBitmap(): Bitmap { + return bitmap ?: let { + bitmap = Utils.convertToBitmap(frameImage, frameRotation) + bitmap!! 
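+                // Cached so later calls skip re-decoding. Note: convertToBitmap() can return null,
+                // in which case the !! assertion above will throw.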
+ } + } +} + class BitmapInputInfo(private val bitmap: Bitmap) : InputInfo { override fun getBitmap(): Bitmap { return bitmap diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt index e89f438451..10b8c42a47 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt @@ -33,11 +33,15 @@ import android.graphics.Rect import android.graphics.RectF import android.graphics.YuvImage import android.hardware.Camera +import android.hardware.camera2.CameraCharacteristics +import android.media.Image import android.net.Uri import android.util.Log +import android.view.Surface import androidx.core.app.ActivityCompat import androidx.core.content.ContextCompat.checkSelfPermission import androidx.exifinterface.media.ExifInterface +import com.google.mlkit.md.camera.Camera2Source import com.google.mlkit.md.camera.CameraSizePair import com.google.mlkit.vision.common.InputImage import java.io.ByteArrayOutputStream @@ -92,6 +96,8 @@ object Utils { context.resources.configuration.orientation == Configuration.ORIENTATION_PORTRAIT /** + * Use [generateValidPreviewSizeList] instead. + * * Generates a list of acceptable preview sizes. Preview sizes are not acceptable if there is not * a corresponding picture size of the same aspect ratio. If there is a corresponding picture size * of the same aspect ratio, the picture size is paired up with the preview size. @@ -101,6 +107,7 @@ object Utils { * be set to a size that is the same aspect ratio as the preview size we choose. Otherwise, the * preview images may be distorted on some devices. */ + @Deprecated("This method is deprecated.") fun generateValidPreviewSizeList(camera: Camera): List { val parameters = camera.parameters val supportedPreviewSizes = parameters.supportedPreviewSizes @@ -135,6 +142,50 @@ object Utils { return validPreviewSizes } + /** + * Generates a list of acceptable preview sizes. Preview sizes are not acceptable if there is not + * a corresponding picture size of the same aspect ratio. If there is a corresponding picture size + * of the same aspect ratio, the picture size is paired up with the preview size. + * + * + * This is necessary because even if we don't use still pictures, the still picture size must + * be set to a size that is the same aspect ratio as the preview size we choose. Otherwise, the + * preview images may be distorted on some devices. + */ + fun generateValidPreviewSizeList(characteristics: CameraCharacteristics): List { + + val supportedPreviewSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(Surface::class.java) + val supportedPictureSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(Camera2Source.IMAGE_FORMAT) + val validPreviewSizes = ArrayList() + for (previewSize in supportedPreviewSizes) { + val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() + + // By looping through the picture sizes in order, we favor the higher resolutions. + // We choose the highest resolution in order to support taking the full resolution + // picture later. 
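+            // Example pairing: a 1920x1080 preview and a 1280x720 picture are both ~1.78:1,
+            // so they fall within ASPECT_RATIO_TOLERANCE and form a valid CameraSizePair.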
+ for (pictureSize in supportedPictureSizes) { + val pictureAspectRatio = pictureSize.width.toFloat() / pictureSize.height.toFloat() + if (abs(previewAspectRatio - pictureAspectRatio) < ASPECT_RATIO_TOLERANCE) { + validPreviewSizes.add(CameraSizePair(previewSize, pictureSize)) + break + } + } + } + + // If there are no picture sizes with the same aspect ratio as any preview sizes, allow all of + // the preview sizes and hope that the camera can handle it. Probably unlikely, but we still + // account for it. + if (validPreviewSizes.isEmpty()) { + Log.w(TAG, "No preview sizes have a corresponding same-aspect-ratio picture size.") + for (previewSize in supportedPreviewSizes) { + // The null picture size will let us know that we shouldn't set a picture size. + validPreviewSizes.add(CameraSizePair(previewSize, null)) + } + } + + return validPreviewSizes + } + fun getCornerRoundedBitmap(srcBitmap: Bitmap, cornerRadius: Int): Bitmap { val dstBitmap = Bitmap.createBitmap(srcBitmap.width, srcBitmap.height, Bitmap.Config.ARGB_8888) val canvas = Canvas(dstBitmap) @@ -171,6 +222,34 @@ object Utils { return null } + fun convertToBitmap(image: Image, rotationDegrees: Int): Bitmap? { + try { + val buffer = image.planes[0].buffer + val bytes = ByteArray(buffer.remaining()).apply { buffer.get(this) } + + BitmapFactory.decodeByteArray(bytes, 0, bytes.size)?.let {bitmap -> + val stream = ByteArrayOutputStream() + val finalBitmap = if (bitmap.compress(Bitmap.CompressFormat.JPEG, 80, stream)){ + BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size()) + } + else{ + bitmap + } + stream.close() + + // Rotate the image back to straight. + val matrix = Matrix() + matrix.postRotate(rotationDegrees.toFloat()) + return Bitmap.createBitmap(finalBitmap, 0, 0, finalBitmap.width, finalBitmap.height, matrix, true) + } + + + } catch (e: java.lang.Exception) { + Log.e(TAG, "Error: " + e.message) + } + return null + } + internal fun openImagePicker(activity: Activity) { val intent = Intent(Intent.ACTION_GET_CONTENT) intent.addCategory(Intent.CATEGORY_OPENABLE) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt new file mode 100644 index 0000000000..c69b7b531e --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt @@ -0,0 +1,609 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.mlkit.md.camera + +import android.Manifest +import android.content.Context +import android.content.pm.PackageManager +import android.graphics.ImageFormat +import android.hardware.camera2.CameraCaptureSession +import android.hardware.camera2.CameraCharacteristics +import android.hardware.camera2.CameraDevice +import android.hardware.camera2.CameraManager +import android.media.Image +import android.media.ImageReader +import android.os.Handler +import android.os.HandlerThread +import android.util.Log +import android.util.Size +import android.view.Surface +import android.view.SurfaceHolder +import android.view.WindowManager +import androidx.core.app.ActivityCompat +import com.google.mlkit.md.R +import com.google.mlkit.md.Utils +import com.google.mlkit.md.settings.PreferenceUtils +import com.google.mlkit.md.utils.OrientationLiveData +import com.google.mlkit.md.utils.computeExifOrientation +import java.io.IOException +import java.nio.ByteBuffer +import java.util.* +import kotlin.math.abs +import kotlin.math.ceil + +/** + * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics). This + * receives preview frames from the camera at a specified rate, sends those frames to detector as + * fast as it is able to process. + * + * + * This camera source makes a best effort to manage processing on preview frames as fast as + * possible, while at the same time minimizing lag. As such, frames may be dropped if the detector + * is unable to keep up with the rate of frames generated by the camera. + */ + +//TODO: Remove this interface once start using coroutine suspend functions +interface CameraCreateCallback{ + fun onSuccess(cameraDevice: CameraDevice) + fun onFailure(error: Exception?) +} + +//TODO: Remove this interface once start using coroutine suspend functions +interface CameraSessionCreateCallback{ + fun onSuccess(cameraCaptureSession: CameraCaptureSession) + fun onFailure(error: Exception?) +} + +class Camera2Source(private val graphicOverlay: GraphicOverlay) { + + private val context: Context = graphicOverlay.context + + /** Detects, characterizes, and connects to a CameraDevice (used for all camera operations) */ + private val cameraManager: CameraManager by lazy { + context.getSystemService(Context.CAMERA_SERVICE) as CameraManager + } + + /** [CameraId] corresponding to the provided Camera facing back property */ + private val cameraId: String by lazy { + cameraManager.cameraIdList.forEach { + val characteristics = cameraManager.getCameraCharacteristics(it) + if (characteristics.get(CameraCharacteristics.LENS_FACING) == CAMERA_FACING_BACK){ + return@lazy it + } + } + throw IOException("No Camera found matching the back facing lens $CAMERA_FACING_BACK") + } + + /** [CameraCharacteristics] corresponding to the provided Camera ID */ + private val characteristics: CameraCharacteristics by lazy { + cameraManager.getCameraCharacteristics(cameraId) + } + + /** The [CameraDevice] that will be opened in this fragment */ + private var camera: CameraDevice? = null + + /** Readers used as buffers for camera still shots */ + private var imageReader: ImageReader? = null + + /** Internal reference to the ongoing [CameraCaptureSession] configured with our parameters */ + private var session: CameraCaptureSession? 
= null + + /** [HandlerThread] where all camera operations run */ + private val cameraThread = HandlerThread("CameraThread").apply { start() } + + /** [Handler] corresponding to [cameraThread] */ + private val cameraHandler = Handler(cameraThread.looper) + + /** [HandlerThread] where all buffer reading operations run */ + private val imageReaderThread = HandlerThread("imageReaderThread").apply { start() } + + /** [Handler] corresponding to [imageReaderThread] */ + private val imageReaderHandler = Handler(imageReaderThread.looper) + + /** Live data property for retrieving the current device orientation relative to the camera or listening to the changes in it */ + private val relativeOrientation: OrientationLiveData by lazy { + OrientationLiveData(context, characteristics) + } + + /** Observer for listening the changes in the [relativeOrientation] live data property */ + private val orientationObserver = androidx.lifecycle.Observer { rotation -> + // Compute EXIF orientation metadata + //val mirrored = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT + exifOrientation = computeExifOrientation(rotation, false) + } + + /** Return the current exif orientation for processing image */ + private var exifOrientation:Int = 0 + + /** Returns the preview size that is currently in use by the underlying camera. */ + internal var previewSize: Size? = null + private set + + /** + * Dedicated thread and associated runnable for calling into the detector with frames, as the + * frames become available from the camera. + */ + private var processingThread: Thread? = null + private val processingRunnable = FrameProcessingRunnable() + + private val processorLock = Object() + private var frameProcessor: Frame2Processor? = null + + /** + * Map to convert between a byte array, received from the camera, and its associated byte buffer. + * We use byte buffers internally because this is a more efficient way to call into native code + * later (avoids a potential copy). + * + * + * **Note:** uses IdentityHashMap here instead of HashMap because the behavior of an array's + * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces + * identity ('==') check on the keys. + */ + private val bytesToByteBuffer = IdentityHashMap() + + /** + * Opens the camera and starts sending preview frames to the underlying detector. The supplied + * surface holder is used for the preview so frames can be displayed to the user. + * + * @param surfaceHolder the surface holder to use for the preview frames. + * @throws Exception if the supplied surface holder could not be used as the preview display. 
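+   * @throws IOException if the camera permission is missing or no camera matches [CAMERA_FACING_BACK].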
+ */ + @Synchronized + @Throws(Exception::class) + internal fun start(surfaceHolder: SurfaceHolder) { + if (camera != null) return + + createCamera(object : CameraCreateCallback{ + override fun onSuccess(cameraDevice: CameraDevice) { + camera = cameraDevice + previewSize = getPreviewAndPictureSize(characteristics).preview.also { previewSize -> + imageReader = ImageReader.newInstance(previewSize.width, previewSize.height, IMAGE_FORMAT, IMAGE_BUFFER_SIZE).also {imageReader -> + createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), object : CameraSessionCreateCallback{ + override fun onSuccess(cameraCaptureSession: CameraCaptureSession) { + session = cameraCaptureSession + startPreview(cameraDevice, surfaceHolder, imageReader, cameraCaptureSession) + relativeOrientation.observeForever(orientationObserver) + } + + override fun onFailure(error: Exception?) { + TODO("Not yet implemented") + } + }, cameraHandler) + } + + + } + } + + override fun onFailure(error: Exception?) { + TODO("Not yet implemented") + } + + }) + + processingThread = Thread(processingRunnable).apply { + processingRunnable.setActive(true) + start() + } + } + + /** + * Start the camera preview on the provided surface and process images through image reader buffer + * + * @param cameraDevice the camera device to select a preview from. + * @param surfaceHolder the surface holder to use for the preview frames. + * @param imageReader the image reader for receiving the preview images for processing. + * @param session the configured camera capture session for the camera device. + * @throws Exception if the supplied surface holder could not be used as the preview display. + */ + + @Throws(Exception::class) + private fun startPreview(cameraDevice: CameraDevice, surfaceHolder: SurfaceHolder, imageReader: ImageReader, session: CameraCaptureSession){ + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply { + addTarget(surfaceHolder.surface) + // This will keep sending the capture request as frequently as possible until the + // session is torn down or session.stopRepeating() is called + session.setRepeatingRequest(build(), null, cameraHandler) + + //Setup listener for receiving the preview frames for processing + imageReader.setOnImageAvailableListener({ + it.acquireNextImage()?.let {image -> + processingRunnable.setNextFrame(image, exifOrientation) + } + }, imageReaderHandler) + + relativeOrientation.observeForever{rotation -> + + } + } + } + + /** + * Closes the camera and stops sending frames to the underlying frame detector. + * + * + * This camera source may be restarted again by calling [.start]. + * + * + * Call [.release] instead to completely shut down this camera source and release the + * resources of the underlying detector. + */ + @Synchronized + internal fun stop() { + processingRunnable.setActive(false) + processingThread?.let { + try { + // Waits for the thread to complete to ensure that we can't have multiple threads executing + // at the same time (i.e., which would happen if we called start too quickly after stop). 
+ it.join() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing thread interrupted on stop.") + } + processingThread = null + } + + camera?.let { + it.stopPreview() + it.setPreviewCallbackWithBuffer(null) + try { + it.setPreviewDisplay(null) + } catch (e: Exception) { + Log.e(TAG, "Failed to clear camera preview: $e") + } + it.release() + camera = null + } + + // Release the reference to any image buffers, since these will no longer be in use. + bytesToByteBuffer.clear() + } + + /** Stops the camera and releases the resources of the camera and underlying detector. */ + fun release() { + graphicOverlay.clear() + synchronized(processorLock) { + stop() + frameProcessor?.stop() + } + } + + fun setFrameProcessor(processor: Frame2Processor) { + graphicOverlay.clear() + synchronized(processorLock) { + frameProcessor?.stop() + frameProcessor = processor + } + } + + fun updateFlashMode(flashMode: String) { + val parameters = camera?.parameters + parameters?.flashMode = flashMode + camera?.parameters = parameters + } + + /** + * Opens the camera and applies the user settings. + * + * @throws Exception if camera cannot be found or preview cannot be processed. + */ + @Throws(Exception::class) + private fun createCamera(callback: CameraCreateCallback) { + + if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { + throw IOException("Camera permission not granted") + } + + cameraManager.openCamera(cameraId, object : CameraDevice.StateCallback() { + override fun onOpened(camera: CameraDevice) { + callback.onSuccess(camera) + } + + override fun onDisconnected(camera: CameraDevice) { + callback.onFailure(null) + } + + override fun onError(camera: CameraDevice, error: Int) { + val msg = when (error) { + ERROR_CAMERA_DEVICE -> "Fatal (device)" + ERROR_CAMERA_DISABLED -> "Device policy" + ERROR_CAMERA_IN_USE -> "Camera in use" + ERROR_CAMERA_SERVICE -> "Fatal (service)" + ERROR_MAX_CAMERAS_IN_USE -> "Maximum cameras in use" + else -> "Unknown" + } + val exc = IOException("Camera $cameraId error: ($error) $msg") + Log.e(TAG, exc.message, exc) + callback.onFailure(exc) + } + + }, cameraHandler) + + } + + /** + * Starts a [CameraCaptureSession] and returns the configured session as callback [CameraSessionCreateCallback] + * + * @throws Exception if session cannot be created. + */ + @Throws(Exception::class) + private fun createCaptureSession(device: CameraDevice, targets: List, callback: CameraSessionCreateCallback, handler: Handler? = null){ + // Create a capture session using the predefined targets; this also involves defining the + // session state callback to be notified of when the session is ready + device.createCaptureSession(targets, object : CameraCaptureSession.StateCallback() { + + override fun onConfigured(session: CameraCaptureSession) = callback.onSuccess(session) + + override fun onConfigureFailed(session: CameraCaptureSession) { + val exc = RuntimeException("Camera ${device.id} session configuration failed") + Log.e(TAG, exc.message, exc) + callback.onFailure(exc) + } + }, handler) + } + + /** + * Get the most suitable [CameraSizePair] from aspect ratio perspective. + * + * @throws Exception if cannot find a suitable size. + */ + @Throws(Exception::class) + private fun getPreviewAndPictureSize(characteristics: CameraCharacteristics): CameraSizePair { + + // Gives priority to the preview size specified by the user if exists. 
+ val sizePair: CameraSizePair = PreferenceUtils.getUserSpecifiedPreviewSize(context) ?: run { + // Camera preview size is based on the landscape mode, so we need to also use the aspect + // ration of display in the same mode for comparison. + val displayAspectRatioInLandscape: Float = + if (Utils.isPortraitMode(graphicOverlay.context)) { + graphicOverlay.height.toFloat() / graphicOverlay.width + } else { + graphicOverlay.width.toFloat() / graphicOverlay.height + } + selectSizePair(characteristics, displayAspectRatioInLandscape) + } ?: throw IOException("Could not find suitable preview size.") + + sizePair.preview.let { + Log.v(TAG, "Camera preview size: $it") + //parameters.setPreviewSize(it.width, it.height) + PreferenceUtils.saveStringPreference(context, R.string.pref_key_rear_camera_preview_size, it.toString()) + } + + sizePair.picture?.let { pictureSize -> + Log.v(TAG, "Camera picture size: $pictureSize") + //parameters.setPictureSize(pictureSize.width, pictureSize.height) + PreferenceUtils.saveStringPreference( + context, R.string.pref_key_rear_camera_picture_size, pictureSize.toString() + ) + } + return sizePair + } + + /** + * Creates one buffer for the camera preview callback. The size of the buffer is based off of the + * camera preview size and the format of the camera image. + * + * @return a new preview buffer of the appropriate size for the current camera settings. + */ + private fun createPreviewBuffer(previewSize: Size): ByteArray { + val bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT) + val sizeInBits = previewSize.height.toLong() * previewSize.width.toLong() * bitsPerPixel.toLong() + val bufferSize = ceil(sizeInBits / 8.0).toInt() + 1 + + // Creating the byte array this way and wrapping it, as opposed to using .allocate(), + // should guarantee that there will be an array to work with. + val byteArray = ByteArray(bufferSize) + val byteBuffer = ByteBuffer.wrap(byteArray) + check(!(!byteBuffer.hasArray() || !byteBuffer.array()!!.contentEquals(byteArray))) { + // This should never happen. If it does, then we wouldn't be passing the preview content to + // the underlying detector later. + "Failed to create valid buffer for camera source." + } + + bytesToByteBuffer[byteArray] = byteBuffer + return byteArray + } + + /** + * This runnable controls access to the underlying receiver, calling it to process frames when + * available from the camera. This is designed to run detection on frames as fast as possible + * (i.e., without unnecessary context switching or waiting on the next frame). + * + * + * While detection is running on a frame, new frames may be received from the camera. As these + * frames come in, the most recent frame is held onto as pending. As soon as detection and its + * associated processing is done for the previous frame, detection on the mostly recently received + * frame will immediately start on the same thread. + */ + private inner class FrameProcessingRunnable internal constructor() : Runnable { + + // This lock guards all of the member variables below. + private val lock = Object() + private var active = true + + // These pending variables hold the state associated with the new frame awaiting processing. + private var pendingFrame: Image? = null + private var pendingFrameRotation: Int = 0 + + /** Marks the runnable as active/not active. Signals any blocked threads to continue. 
*/ + internal fun setActive(active: Boolean) { + synchronized(lock) { + this.active = active + lock.notifyAll() + } + } + + /** + * Sets the frame data received from the camera. This adds the previous unused frame buffer (if + * present) back to the camera, and keeps a pending reference to the frame data for future use. + */ + internal fun setNextFrame(image: Image, rotation: Int) { + synchronized(lock) { + pendingFrame?.let { + it.close() + pendingFrame = null + } + + pendingFrame = image + pendingFrameRotation = rotation + + // Notify the processor thread if it is waiting on the next frame (see below). + lock.notifyAll() + } + } + + /** + * As long as the processing thread is active, this executes detection on frames continuously. + * The next pending frame is either immediately available or hasn't been received yet. Once it + * is available, we transfer the frame info to local variables and run detection on that frame. + * It immediately loops back for the next frame without pausing. + * + * + * If detection takes longer than the time in between new frames from the camera, this will + * mean that this loop will run without ever waiting on a frame, avoiding any context switching + * or frame acquisition time latency. + * + * + * If you find that this is using more CPU than you'd like, you should probably decrease the + * FPS setting above to allow for some idle time in between frames. + */ + override fun run() { + var data: Image? + + while (true) { + synchronized(lock) { + while (active && pendingFrame == null) { + try { + // Wait for the next frame to be received from the camera, since we don't have it yet. + lock.wait() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing loop terminated.", e) + return + } + } + + if (!active) { + // Exit the loop once this camera source is stopped or released. We check this here, + // immediately after the wait() above, to handle the case where setActive(false) had + // been called, triggering the termination of this loop. + return + } + + // Hold onto the frame data locally, so that we can use this for detection + // below. We need to clear pendingFrameData to ensure that this buffer isn't + // recycled back to the camera before we are done using that data. + data = pendingFrame + pendingFrame = null + } + + try { + synchronized(processorLock) { + data?.let { + frameProcessor?.process(it, pendingFrameRotation, graphicOverlay) + } + } + } catch (t: Exception) { + Log.e(TAG, "Exception thrown from receiver.", t) + } finally { + data?.close() + } + } + } + } + + companion object { + + const val CAMERA_FACING_BACK = CameraCharacteristics.LENS_FACING_BACK + const val IMAGE_FORMAT = ImageFormat.YUV_420_888 + + private const val TAG = "CameraSource" + + /** Maximum number of images that will be held in the reader's buffer */ + private const val IMAGE_BUFFER_SIZE: Int = 4 + + private const val MIN_CAMERA_PREVIEW_WIDTH = 400 + private const val MAX_CAMERA_PREVIEW_WIDTH = 1300 + private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 640 + private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360 + private const val REQUESTED_CAMERA_FPS = 30.0f + + /** + * Selects the most suitable preview and picture size, given the display aspect ratio in landscape + * mode. + * + * + * It's firstly trying to pick the one that has closest aspect ratio to display view with its + * width be in the specified range [[.MIN_CAMERA_PREVIEW_WIDTH], [ ][.MAX_CAMERA_PREVIEW_WIDTH]]. If there're multiple candidates, choose the one having longest + * width. 
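+   * For example, a 1280x720 candidate (~1.78:1) would be preferred over 1024x768 (~1.33:1) on a 16:9 display.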
+ * + * + * If the above looking up failed, chooses the one that has the minimum sum of the differences + * between the desired values and the actual values for width and height. + * + * + * Even though we only need to find the preview size, it's necessary to find both the preview + * size and the picture size of the camera together, because these need to have the same aspect + * ratio. On some hardware, if you would only set the preview size, you will get a distorted + * image. + * + * @param camera the camera to select a preview size from + * @return the selected preview and picture size pair + */ + private fun selectSizePair(characteristics: CameraCharacteristics, displayAspectRatioInLandscape: Float): CameraSizePair? { + val validPreviewSizes = Utils.generateValidPreviewSizeList(characteristics) + + var selectedPair: CameraSizePair? = null + // Picks the preview size that has closest aspect ratio to display view. + var minAspectRatioDiff = Float.MAX_VALUE + + for (sizePair in validPreviewSizes) { + val previewSize = sizePair.preview + if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { + continue + } + + val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() + val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) + if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { + if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { + selectedPair = sizePair + } + } else if (aspectRatioDiff < minAspectRatioDiff) { + minAspectRatioDiff = aspectRatioDiff + selectedPair = sizePair + } + } + + if (selectedPair == null) { + // Picks the one that has the minimum sum of the differences between the desired values and + // the actual values for width and height. + var minDiff = Integer.MAX_VALUE + for (sizePair in validPreviewSizes) { + val size = sizePair.preview + val diff = + abs(size.width - DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH) + + abs(size.height - DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT) + if (diff < minDiff) { + selectedPair = sizePair + minDiff = diff + } + } + } + + return selectedPair + } + } +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt new file mode 100644 index 0000000000..995137573c --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt @@ -0,0 +1,148 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.mlkit.md.camera + +import android.content.Context +import android.util.AttributeSet +import android.util.Log +import android.util.Size +import android.view.SurfaceHolder +import android.view.SurfaceView +import android.widget.FrameLayout +import com.google.mlkit.md.R +import com.google.mlkit.md.Utils +import java.io.IOException + +/** Preview the camera image in the screen. */ +class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) { + + private val surfaceView: SurfaceView = SurfaceView(context).apply { + holder.addCallback(SurfaceCallback()) + addView(this) + } + private var graphicOverlay: GraphicOverlay? = null + private var startRequested = false + private var surfaceAvailable = false + private var cameraSource: CameraSource? = null + private var cameraPreviewSize: Size? = null + + override fun onFinishInflate() { + super.onFinishInflate() + graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay) + } + + @Throws(IOException::class) + fun start(cameraSource: CameraSource) { + this.cameraSource = cameraSource + startRequested = true + startIfReady() + } + + fun stop() { + cameraSource?.let { + it.stop() + cameraSource = null + startRequested = false + } + } + + @Throws(IOException::class) + private fun startIfReady() { + if (startRequested && surfaceAvailable) { + cameraSource?.start(surfaceView.holder) + requestLayout() + graphicOverlay?.let { overlay -> + cameraSource?.let { + overlay.setCameraInfo(it) + } + overlay.clear() + } + startRequested = false + } + } + + override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) { + val layoutWidth = right - left + val layoutHeight = bottom - top + + cameraSource?.previewSize?.let { cameraPreviewSize = it } + + val previewSizeRatio = cameraPreviewSize?.let { size -> + if (Utils.isPortraitMode(context)) { + // Camera's natural orientation is landscape, so need to swap width and height. + size.height.toFloat() / size.width + } else { + size.width.toFloat() / size.height + } + } ?: (layoutWidth.toFloat() / layoutHeight.toFloat()) + + // Match the width of the child view to its parent. + val childHeight = (layoutWidth / previewSizeRatio).toInt() + if (childHeight <= layoutHeight) { + for (i in 0 until childCount) { + getChildAt(i).layout(0, 0, layoutWidth, childHeight) + } + } else { + // When the child view is too tall to be fitted in its parent: If the child view is + // static overlay view container (contains views such as bottom prompt chip), we apply + // the size of the parent view to it. Otherwise, we offset the top/bottom position + // equally to position it in the center of the parent. 
+ val excessLenInHalf = (childHeight - layoutHeight) / 2 + for (i in 0 until childCount) { + val childView = getChildAt(i) + when (childView.id) { + R.id.static_overlay_container -> { + childView.layout(0, 0, layoutWidth, layoutHeight) + } + else -> { + childView.layout( + 0, -excessLenInHalf, layoutWidth, layoutHeight + excessLenInHalf + ) + } + } + } + } + + try { + startIfReady() + } catch (e: IOException) { + Log.e(TAG, "Could not start camera source.", e) + } + } + + private inner class SurfaceCallback : SurfaceHolder.Callback { + override fun surfaceCreated(surface: SurfaceHolder) { + surfaceAvailable = true + try { + startIfReady() + } catch (e: IOException) { + Log.e(TAG, "Could not start camera source.", e) + } + } + + override fun surfaceDestroyed(surface: SurfaceHolder) { + surfaceAvailable = false + } + + override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) { + } + } + + companion object { + private const val TAG = "CameraSourcePreview" + } +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt index a3d025296e..492d5f3f0a 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt @@ -17,7 +17,7 @@ package com.google.mlkit.md.camera import android.hardware.Camera -import com.google.android.gms.common.images.Size +import android.util.Size /** * Stores a preview size and a corresponding same-aspect-ratio picture size. To avoid distorted diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt index 1050b7a27a..5444268654 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt @@ -22,10 +22,10 @@ import android.hardware.Camera import android.hardware.Camera.CameraInfo import android.hardware.Camera.Parameters import android.util.Log +import android.util.Size import android.view.Surface import android.view.SurfaceHolder import android.view.WindowManager -import com.google.android.gms.common.images.Size import com.google.mlkit.md.R import com.google.mlkit.md.Utils import com.google.mlkit.md.settings.PreferenceUtils diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt index 3694222f4f..2aeb5e9571 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt @@ -19,10 +19,10 @@ package com.google.mlkit.md.camera import android.content.Context import android.util.AttributeSet import android.util.Log +import android.util.Size import android.view.SurfaceHolder import android.view.SurfaceView import android.widget.FrameLayout -import com.google.android.gms.common.images.Size import com.google.mlkit.md.R import com.google.mlkit.md.Utils import java.io.IOException diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt 
b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt new file mode 100644 index 0000000000..80700d7e6b --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt @@ -0,0 +1,30 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.mlkit.md.camera + +import android.media.Image +import java.nio.ByteBuffer + +/** An interface to process the input camera frame and perform detection on it. */ +interface Frame2Processor { + + /** Processes the input frame with the underlying detector. */ + fun process(image: Image, rotation: Int, graphicOverlay: GraphicOverlay) + + /** Stops the underlying detector and release resources. */ + fun stop() +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt new file mode 100644 index 0000000000..f6ccee7f23 --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt @@ -0,0 +1,94 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.mlkit.md.camera + +import android.media.Image +import android.os.SystemClock +import android.util.Log +import androidx.annotation.GuardedBy +import com.google.android.gms.tasks.OnFailureListener +import com.google.android.gms.tasks.Task +import com.google.android.gms.tasks.TaskExecutors +import com.google.mlkit.md.* +import com.google.mlkit.vision.common.InputImage + +/** Abstract base class of [FrameProcessor]. */ +abstract class Frame2ProcessorBase : Frame2Processor { + + // To keep the latest frame and its metadata. + @GuardedBy("this") + private var latestFrame: Image? = null + + @GuardedBy("this") + private var latestFrameRotation: Int? = null + + // To keep the frame and metadata in process. + @GuardedBy("this") + private var processingFrame: Image? = null + + @GuardedBy("this") + private var processingFrameRotation: Int? 
= null + private val executor = ScopedExecutor(TaskExecutors.MAIN_THREAD) + + @Synchronized + override fun process(image: Image, rotation: Int, graphicOverlay: GraphicOverlay) { + latestFrame = image + latestFrameRotation = rotation + if (processingFrame == null && processingFrameRotation == null) { + processLatestFrame(graphicOverlay) + } + } + + @Synchronized + private fun processLatestFrame(graphicOverlay: GraphicOverlay) { + processingFrame = latestFrame + processingFrameRotation = latestFrameRotation + latestFrame = null + latestFrameRotation = null + val frame = processingFrame ?: return + val frameRotation = processingFrameRotation ?: return + val image = InputImage.fromMediaImage(frame, frameRotation) + + val startMs = SystemClock.elapsedRealtime() + detectInImage(image) + .addOnSuccessListener(executor) { results: T -> + Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}") + this@Frame2ProcessorBase.onSuccess(Camera2InputInfo(frame, frameRotation), results, graphicOverlay) + processLatestFrame(graphicOverlay) + } + .addOnFailureListener(executor) { e -> OnFailureListener { this@Frame2ProcessorBase.onFailure(it) } } + } + + override fun stop() { + executor.shutdown() + } + + protected abstract fun detectInImage(image: InputImage): Task + + /** Be called when the detection succeeds. */ + protected abstract fun onSuccess( + inputInfo: InputInfo, + results: T, + graphicOverlay: GraphicOverlay + ) + + protected abstract fun onFailure(e: Exception) + + companion object { + private const val TAG = "FrameProcessorBase" + } +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt index 16860cb34b..f1dd7e7788 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt @@ -19,8 +19,8 @@ package com.google.mlkit.md.settings import android.content.Context import android.graphics.RectF import android.preference.PreferenceManager +import android.util.Size import androidx.annotation.StringRes -import com.google.android.gms.common.images.Size import com.google.mlkit.md.R import com.google.mlkit.md.camera.CameraSizePair import com.google.mlkit.md.camera.GraphicOverlay diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/ExifUtils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/ExifUtils.kt new file mode 100644 index 0000000000..35b20fc082 --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/ExifUtils.kt @@ -0,0 +1,73 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.mlkit.md.utils + +import android.graphics.Bitmap +import android.graphics.Matrix +import android.util.Log +import androidx.exifinterface.media.ExifInterface + +private const val TAG: String = "ExifUtils" + +/** Transforms rotation and mirroring information into one of the [ExifInterface] constants */ +fun computeExifOrientation(rotationDegrees: Int, mirrored: Boolean) = when { + rotationDegrees == 0 && !mirrored -> ExifInterface.ORIENTATION_NORMAL + rotationDegrees == 0 && mirrored -> ExifInterface.ORIENTATION_FLIP_HORIZONTAL + rotationDegrees == 180 && !mirrored -> ExifInterface.ORIENTATION_ROTATE_180 + rotationDegrees == 180 && mirrored -> ExifInterface.ORIENTATION_FLIP_VERTICAL + rotationDegrees == 270 && mirrored -> ExifInterface.ORIENTATION_TRANSVERSE + rotationDegrees == 90 && !mirrored -> ExifInterface.ORIENTATION_ROTATE_90 + rotationDegrees == 90 && mirrored -> ExifInterface.ORIENTATION_TRANSPOSE + rotationDegrees == 270 && mirrored -> ExifInterface.ORIENTATION_ROTATE_270 + rotationDegrees == 270 && !mirrored -> ExifInterface.ORIENTATION_TRANSVERSE + else -> ExifInterface.ORIENTATION_UNDEFINED +} + +/** + * Helper function used to convert an EXIF orientation enum into a transformation matrix + * that can be applied to a bitmap. + * + * @return matrix - Transformation required to properly display [Bitmap] + */ +fun decodeExifOrientation(exifOrientation: Int): Matrix { + val matrix = Matrix() + + // Apply transformation corresponding to declared EXIF orientation + when (exifOrientation) { + ExifInterface.ORIENTATION_NORMAL -> Unit + ExifInterface.ORIENTATION_UNDEFINED -> Unit + ExifInterface.ORIENTATION_ROTATE_90 -> matrix.postRotate(90F) + ExifInterface.ORIENTATION_ROTATE_180 -> matrix.postRotate(180F) + ExifInterface.ORIENTATION_ROTATE_270 -> matrix.postRotate(270F) + ExifInterface.ORIENTATION_FLIP_HORIZONTAL -> matrix.postScale(-1F, 1F) + ExifInterface.ORIENTATION_FLIP_VERTICAL -> matrix.postScale(1F, -1F) + ExifInterface.ORIENTATION_TRANSPOSE -> { + matrix.postScale(-1F, 1F) + matrix.postRotate(270F) + } + ExifInterface.ORIENTATION_TRANSVERSE -> { + matrix.postScale(-1F, 1F) + matrix.postRotate(90F) + } + + // Error out if the EXIF orientation is invalid + else -> Log.e(TAG, "Invalid orientation: $exifOrientation") + } + + // Return the resulting matrix + return matrix +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/OrientationLiveData.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/OrientationLiveData.kt new file mode 100644 index 0000000000..cd96e842d1 --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/utils/OrientationLiveData.kt @@ -0,0 +1,95 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.mlkit.md.utils + +import android.content.Context +import android.hardware.camera2.CameraCharacteristics +import android.view.OrientationEventListener +import android.view.Surface +import androidx.lifecycle.LiveData + + +/** + * Calculates closest 90-degree orientation to compensate for the device + * rotation relative to sensor orientation, i.e., allows user to see camera + * frames with the expected orientation. + */ +class OrientationLiveData( + context: Context, + characteristics: CameraCharacteristics +): LiveData() { + + private val listener = object : OrientationEventListener(context.applicationContext) { + override fun onOrientationChanged(orientation: Int) { + val rotation = when { + orientation <= 45 -> Surface.ROTATION_0 + orientation <= 135 -> Surface.ROTATION_90 + orientation <= 225 -> Surface.ROTATION_180 + orientation <= 315 -> Surface.ROTATION_270 + else -> Surface.ROTATION_0 + } + val relative = computeRelativeRotation(characteristics, rotation) + if (relative != value) postValue(relative) + } + } + + override fun onActive() { + super.onActive() + listener.enable() + } + + override fun onInactive() { + super.onInactive() + listener.disable() + } + + companion object { + + /** + * Computes rotation required to transform from the camera sensor orientation to the + * device's current orientation in degrees. + * + * @param characteristics the [CameraCharacteristics] to query for the sensor orientation. + * @param surfaceRotation the current device orientation as a Surface constant + * @return the relative rotation from the camera sensor to the current device orientation. + */ + @JvmStatic + private fun computeRelativeRotation( + characteristics: CameraCharacteristics, + surfaceRotation: Int + ): Int { + val sensorOrientationDegrees = + characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! + + val deviceOrientationDegrees = when (surfaceRotation) { + Surface.ROTATION_0 -> 0 + Surface.ROTATION_90 -> 90 + Surface.ROTATION_180 -> 180 + Surface.ROTATION_270 -> 270 + else -> 0 + } + + // Reverse device orientation for front-facing cameras + val sign = if (characteristics.get(CameraCharacteristics.LENS_FACING) == + CameraCharacteristics.LENS_FACING_FRONT) 1 else -1 + + // Calculate desired JPEG orientation relative to camera orientation to make + // the image upright relative to the device orientation + return (sensorOrientationDegrees - (deviceOrientationDegrees * sign) + 360) % 360 + } + } +} From 445812770bb3f18569ab8631e476f0b7bbc98d54 Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Thu, 20 Jul 2023 11:05:49 +0530 Subject: [PATCH 03/18] Camera to Camera2 API upgrade related changes done. 
--- android/material-showcase/app/build.gradle | 4 +- .../mlkit/md/LiveBarcodeScanningActivity.kt | 38 ++-- .../main/java/com/google/mlkit/md/Utils.kt | 3 +- .../md/barcodedetection/Barcode2Processor.kt | 121 +++++++++++ .../google/mlkit/md/camera/Camera2Source.kt | 195 +++++++++--------- .../mlkit/md/camera/Camera2SourcePreview.kt | 52 +++-- .../mlkit/md/camera/Frame2ProcessorBase.kt | 9 +- .../google/mlkit/md/camera/GraphicOverlay.kt | 17 ++ .../main/res/layout/activity_live_barcode.xml | 4 +- 9 files changed, 305 insertions(+), 138 deletions(-) create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt diff --git a/android/material-showcase/app/build.gradle b/android/material-showcase/app/build.gradle index cd8be74044..78210f29c2 100644 --- a/android/material-showcase/app/build.gradle +++ b/android/material-showcase/app/build.gradle @@ -5,8 +5,8 @@ apply plugin: 'kotlin-android-extensions' android { compileSdkVersion 31 defaultConfig { - applicationId "com.google.mlkit.md" - minSdkVersion 19 + applicationId "com.google.mlkit.mdn" + minSdkVersion 21 targetSdkVersion 31 versionCode 1 versionName "1.0" diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt index 1423a8f8cb..82aa1712e4 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt @@ -19,7 +19,6 @@ package com.google.mlkit.md import android.animation.AnimatorInflater import android.animation.AnimatorSet import android.content.Intent -import android.hardware.Camera import android.os.Bundle import android.util.Log import android.view.View @@ -29,23 +28,22 @@ import androidx.lifecycle.Observer import androidx.lifecycle.ViewModelProviders import com.google.android.material.chip.Chip import com.google.common.base.Objects +import com.google.mlkit.md.barcodedetection.Barcode2Processor +import com.google.mlkit.md.barcodedetection.BarcodeField +import com.google.mlkit.md.barcodedetection.BarcodeResultFragment +import com.google.mlkit.md.camera.Camera2Source +import com.google.mlkit.md.camera.Camera2SourcePreview import com.google.mlkit.md.camera.GraphicOverlay import com.google.mlkit.md.camera.WorkflowModel import com.google.mlkit.md.camera.WorkflowModel.WorkflowState -import com.google.mlkit.md.barcodedetection.BarcodeField -import com.google.mlkit.md.barcodedetection.BarcodeProcessor -import com.google.mlkit.md.barcodedetection.BarcodeResultFragment -import com.google.mlkit.md.camera.CameraSource -import com.google.mlkit.md.camera.CameraSourcePreview import com.google.mlkit.md.settings.SettingsActivity import java.io.IOException -import java.util.ArrayList /** Demonstrates the barcode scanning workflow using camera preview. */ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { - private var cameraSource: CameraSource? = null - private var preview: CameraSourcePreview? = null + private var cameraSource: Camera2Source? = null + private var preview: Camera2SourcePreview? = null private var graphicOverlay: GraphicOverlay? = null private var settingsButton: View? = null private var flashButton: View? 
= null @@ -61,7 +59,7 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { preview = findViewById(R.id.camera_preview) graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay).apply { setOnClickListener(this@LiveBarcodeScanningActivity) - cameraSource = CameraSource(this) + cameraSource = Camera2Source(this) } promptChip = findViewById(R.id.bottom_prompt_chip) @@ -87,7 +85,7 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { workflowModel?.markCameraFrozen() settingsButton?.isEnabled = true currentWorkflowState = WorkflowState.NOT_STARTED - cameraSource?.setFrameProcessor(BarcodeProcessor(graphicOverlay!!, workflowModel!!)) + cameraSource?.setFrameProcessor(Barcode2Processor(graphicOverlay!!, workflowModel!!)) workflowModel?.setWorkflowState(WorkflowState.DETECTING) } @@ -115,10 +113,10 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { flashButton?.let { if (it.isSelected) { it.isSelected = false - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_OFF) + cameraSource?.updateFlashMode(false) } else { it.isSelected = true - cameraSource!!.updateFlashMode(Camera.Parameters.FLASH_MODE_TORCH) + cameraSource!!.updateFlashMode(true) } } } @@ -149,12 +147,18 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { if (workflowModel.isCameraLive) { workflowModel.markCameraFrozen() flashButton?.isSelected = false - preview?.stop() + try { + preview?.stop() + } + catch (e: Throwable){ + Log.e(TAG, "Failed to stop camera preview: ${e.message}") + } + } } private fun setUpWorkflowModel() { - workflowModel = ViewModelProviders.of(this).get(WorkflowModel::class.java) + workflowModel = ViewModelProviders.of(this)[WorkflowModel::class.java] // Observes the workflow state changes, if happens, update the overlay view indicators and // camera preview state. 
@@ -197,13 +201,13 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { } }) - workflowModel?.detectedBarcode?.observe(this, Observer { barcode -> + workflowModel?.detectedBarcode?.observe(this) { barcode -> if (barcode != null) { val barcodeFieldList = ArrayList() barcodeFieldList.add(BarcodeField("Raw Value", barcode.rawValue ?: "")) BarcodeResultFragment.show(supportFragmentManager, barcodeFieldList) } - }) + } } companion object { diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt index 10b8c42a47..6ddb3038c9 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt @@ -38,6 +38,7 @@ import android.media.Image import android.net.Uri import android.util.Log import android.view.Surface +import android.view.SurfaceHolder import androidx.core.app.ActivityCompat import androidx.core.content.ContextCompat.checkSelfPermission import androidx.exifinterface.media.ExifInterface @@ -154,7 +155,7 @@ object Utils { */ fun generateValidPreviewSizeList(characteristics: CameraCharacteristics): List { - val supportedPreviewSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(Surface::class.java) + val supportedPreviewSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(SurfaceHolder::class.java) val supportedPictureSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(Camera2Source.IMAGE_FORMAT) val validPreviewSizes = ArrayList() for (previewSize in supportedPreviewSizes) { diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt new file mode 100644 index 0000000000..c797fac4e6 --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt @@ -0,0 +1,121 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.mlkit.md.barcodedetection + +import android.animation.ValueAnimator +import android.util.Log +import androidx.annotation.MainThread +import com.google.android.gms.tasks.Task +import com.google.mlkit.md.InputInfo +import com.google.mlkit.md.camera.* +import com.google.mlkit.md.camera.WorkflowModel.WorkflowState +import com.google.mlkit.md.settings.PreferenceUtils +import com.google.mlkit.vision.barcode.BarcodeScanning +import com.google.mlkit.vision.barcode.common.Barcode +import com.google.mlkit.vision.common.InputImage +import java.io.IOException + +/** A processor to run the barcode detector. 
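+ * Picks the barcode, if any, that covers the center of the graphic overlay and drives
+ * the scanning workflow state based on its size and position.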
+class Barcode2Processor(graphicOverlay: GraphicOverlay, private val workflowModel: WorkflowModel) :
+  Frame2ProcessorBase<List<Barcode>>() {
+
+  private val scanner = BarcodeScanning.getClient()
+  private val cameraReticleAnimator: CameraReticleAnimator = CameraReticleAnimator(graphicOverlay)
+
+  override fun detectInImage(image: InputImage): Task<List<Barcode>> =
+    scanner.process(image)
+
+  @MainThread
+  override fun onSuccess(
+    inputInfo: InputInfo,
+    results: List<Barcode>,
+    graphicOverlay: GraphicOverlay
+  ) {
+
+    if (!workflowModel.isCameraLive) return
+
+    //Log.d(TAG, "Barcode result size: ${results.size}")
+
+    // Picks the barcode, if exists, that covers the center of graphic overlay.
+    val barcodeInCenter = results.firstOrNull { barcode ->
+      val boundingBox = barcode.boundingBox ?: return@firstOrNull false
+      val box = graphicOverlay.translateRect(boundingBox)
+      box.contains(graphicOverlay.width / 2f, graphicOverlay.height / 2f)
+    }
+
+    graphicOverlay.clear()
+    if (barcodeInCenter == null) {
+      cameraReticleAnimator.start()
+      graphicOverlay.add(BarcodeReticleGraphic(graphicOverlay, cameraReticleAnimator))
+      workflowModel.setWorkflowState(WorkflowState.DETECTING)
+    } else {
+      cameraReticleAnimator.cancel()
+      val sizeProgress = PreferenceUtils.getProgressToMeetBarcodeSizeRequirement(graphicOverlay, barcodeInCenter)
+      if (sizeProgress < 1) {
+        // Barcode in the camera view is too small, so prompt user to move camera closer.
+        graphicOverlay.add(BarcodeConfirmingGraphic(graphicOverlay, barcodeInCenter))
+        workflowModel.setWorkflowState(WorkflowState.CONFIRMING)
+      } else {
+        // Barcode size in the camera view is sufficient.
+        if (PreferenceUtils.shouldDelayLoadingBarcodeResult(graphicOverlay.context)) {
+          val loadingAnimator = createLoadingAnimator(graphicOverlay, barcodeInCenter)
+          loadingAnimator.start()
+          graphicOverlay.add(BarcodeLoadingGraphic(graphicOverlay, loadingAnimator))
+          workflowModel.setWorkflowState(WorkflowState.SEARCHING)
+        } else {
+          workflowModel.setWorkflowState(WorkflowState.DETECTED)
+          workflowModel.detectedBarcode.setValue(barcodeInCenter)
+        }
+      }
+    }
+    graphicOverlay.invalidate()
+  }
+
+  private fun createLoadingAnimator(graphicOverlay: GraphicOverlay, barcode: Barcode): ValueAnimator {
+    val endProgress = 1.1f
+    return ValueAnimator.ofFloat(0f, endProgress).apply {
+      duration = 2000
+      addUpdateListener {
+        if ((animatedValue as Float).compareTo(endProgress) >= 0) {
+          graphicOverlay.clear()
+          workflowModel.setWorkflowState(WorkflowState.SEARCHED)
+          workflowModel.detectedBarcode.setValue(barcode)
+        } else {
+          graphicOverlay.invalidate()
+        }
+      }
+    }
+  }
+
+  override fun onFailure(e: Exception) {
+    Log.e(TAG, "Barcode detection failed!", e)
+  }
+
+  override fun stop() {
+    super.stop()
+    try {
+      scanner.close()
+    } catch (e: IOException) {
+      Log.e(TAG, "Failed to close barcode detector!", e)
+    }
+  }
+
+  companion object {
+    private const val TAG = "Barcode2Processor"
+  }
+}
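
Stripped of the overlay and workflow plumbing, the ML Kit call that Barcode2Processor wraps boils down to the following standalone sketch; the mediaImage and rotationDegrees inputs are assumed to come from the ImageReader wired up in Camera2Source below:

    import android.media.Image
    import com.google.mlkit.vision.barcode.BarcodeScanning
    import com.google.mlkit.vision.common.InputImage

    // Sketch: one-shot barcode detection on a single camera frame.
    fun scanFrame(mediaImage: Image, rotationDegrees: Int) {
        val image = InputImage.fromMediaImage(mediaImage, rotationDegrees)
        BarcodeScanning.getClient().process(image)
            .addOnSuccessListener { barcodes ->
                // Each result carries rawValue, boundingBox, format, etc.
                barcodes.forEach { barcode -> println(barcode.rawValue) }
            }
            .addOnFailureListener { e -> e.printStackTrace() }
    }
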
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt
index c69b7b531e..259e84a9e2 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt
@@ -20,10 +20,7 @@ import android.Manifest
 import android.content.Context
 import android.content.pm.PackageManager
 import android.graphics.ImageFormat
-import android.hardware.camera2.CameraCaptureSession
-import android.hardware.camera2.CameraCharacteristics
-import android.hardware.camera2.CameraDevice
-import android.hardware.camera2.CameraManager
+import android.hardware.camera2.*
 import android.media.Image
 import android.media.ImageReader
 import android.os.Handler
@@ -32,7 +29,6 @@ import android.util.Log
 import android.util.Size
 import android.view.Surface
 import android.view.SurfaceHolder
-import android.view.WindowManager
 import androidx.core.app.ActivityCompat
 import com.google.mlkit.md.R
 import com.google.mlkit.md.Utils
@@ -40,10 +36,8 @@ import com.google.mlkit.md.settings.PreferenceUtils
 import com.google.mlkit.md.utils.OrientationLiveData
 import com.google.mlkit.md.utils.computeExifOrientation
 import java.io.IOException
-import java.nio.ByteBuffer
 import java.util.*
 import kotlin.math.abs
-import kotlin.math.ceil
 
 /**
  * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics). This
@@ -57,17 +51,23 @@ import kotlin.math.ceil
  */
 
 //TODO: Remove this interface once start using coroutine suspend functions
-interface CameraCreateCallback{
+private interface CameraCreateCallback{
   fun onSuccess(cameraDevice: CameraDevice)
   fun onFailure(error: Exception?)
 }
 
 //TODO: Remove this interface once start using coroutine suspend functions
-interface CameraSessionCreateCallback{
+private interface CameraSessionCreateCallback{
   fun onSuccess(cameraCaptureSession: CameraCaptureSession)
   fun onFailure(error: Exception?)
 }
 
+//TODO: Remove this interface once start using coroutine suspend functions
+interface CameraStartCallback{
+  fun onSuccess()
+  fun onFailure(error: Exception?)
+}
+
 class Camera2Source(private val graphicOverlay: GraphicOverlay) {
 
   private val context: Context = graphicOverlay.context
@@ -77,7 +77,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
   }
 
-  /** [CameraId] corresponding to the provided Camera facing back property */
+  /** [cameraId] corresponding to the provided Camera facing back property */
  private val cameraId: String by lazy {
     cameraManager.cameraIdList.forEach {
       val characteristics = cameraManager.getCameraCharacteristics(it)
@@ -93,12 +93,15 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     cameraManager.getCameraCharacteristics(cameraId)
   }
 
-  /** The [CameraDevice] that will be opened in this fragment */
+  /** The [CameraDevice] that will be used for preview */
   private var camera: CameraDevice? = null
 
-  /** Readers used as buffers for camera still shots */
+  /** The [ImageReader] that will be used for reading image frame buffers */
   private var imageReader: ImageReader? = null
 
+  /** The [CaptureRequest.Builder] that will be used for the session */
+  private var captureRequest: CaptureRequest.Builder? = null
+
   /** Internal reference to the ongoing [CameraCaptureSession] configured with our parameters */
   private var session: CameraCaptureSession? = null
@@ -121,13 +124,11 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
 
   /** Observer for listening the changes in the [relativeOrientation] live data property */
   private val orientationObserver = androidx.lifecycle.Observer<Int> { rotation ->
-    // Compute EXIF orientation metadata
-    //val mirrored = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT
-    exifOrientation = computeExifOrientation(rotation, false)
+    Log.d(TAG, "Orientation changed: $rotation")
   }
 
   /** Return the current exif orientation for processing image */
-  private var exifOrientation:Int = 0
+  //private var rotationDegrees:Int = 0
 
   /** Returns the preview size that is currently in use by the underlying camera. */
   internal var previewSize: Size? = null
@@ -143,18 +144,6 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
   private val processorLock = Object()
   private var frameProcessor: Frame2Processor? = null
 
-  /**
-   * Map to convert between a byte array, received from the camera, and its associated byte buffer.
-   * We use byte buffers internally because this is a more efficient way to call into native code
-   * later (avoids a potential copy).
-   *
-   *
-   * **Note:** uses IdentityHashMap here instead of HashMap because the behavior of an array's
-   * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces
-   * identity ('==') check on the keys.
-   */
-  private val bytesToByteBuffer = IdentityHashMap<ByteArray, ByteBuffer>()
-
   /**
    * Opens the camera and starts sending preview frames to the underlying detector. The supplied
    * surface holder is used for the preview so frames can be displayed to the user.
@@ -164,7 +153,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
    */
   @Synchronized
   @Throws(Exception::class)
-  internal fun start(surfaceHolder: SurfaceHolder) {
+  internal fun start(surfaceHolder: SurfaceHolder, callback: CameraStartCallback) {
     if (camera != null) return
 
     createCamera(object : CameraCreateCallback{
@@ -175,12 +164,17 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
         createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), object : CameraSessionCreateCallback{
           override fun onSuccess(cameraCaptureSession: CameraCaptureSession) {
             session = cameraCaptureSession
-            startPreview(cameraDevice, surfaceHolder, imageReader, cameraCaptureSession)
-            relativeOrientation.observeForever(orientationObserver)
+            captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
+              addTarget(surfaceHolder.surface)
+              //addTarget(imageReader.surface)
+              startPreview(this, imageReader, cameraCaptureSession)
+              callback.onSuccess()
+            }
+
           }
 
           override fun onFailure(error: Exception?) {
-            TODO("Not yet implemented")
+            callback.onFailure(error)
           }
         }, cameraHandler)
       }
@@ -190,7 +184,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
       }
 
       override fun onFailure(error: Exception?) {
-        TODO("Not yet implemented")
+        callback.onFailure(error)
      }
    })
@@ -199,37 +193,48 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
       processingRunnable.setActive(true)
       start()
     }
+
+    relativeOrientation.observeForever(orientationObserver)
   }
 
   /**
    * Start the camera preview on the provided surface and process images through image reader buffer
    *
-   * @param cameraDevice the camera device to select a preview from.
-   * @param surfaceHolder the surface holder to use for the preview frames.
+   * @param captureRequest the capture request builder to use for the session.
    * @param imageReader the image reader for receiving the preview images for processing.
    * @param session the configured camera capture session for the camera device.
+   *
    * @throws Exception if the supplied surface holder could not be used as the preview display.
    */
   @Throws(Exception::class)
-  private fun startPreview(cameraDevice: CameraDevice, surfaceHolder: SurfaceHolder, imageReader: ImageReader, session: CameraCaptureSession){
-    cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
-      addTarget(surfaceHolder.surface)
-      // This will keep sending the capture request as frequently as possible until the
-      // session is torn down or session.stopRepeating() is called
-      session.setRepeatingRequest(build(), null, cameraHandler)
-
-      //Setup listener for receiving the preview frames for processing
-      imageReader.setOnImageAvailableListener({
-        it.acquireNextImage()?.let {image ->
-          processingRunnable.setNextFrame(image, exifOrientation)
-        }
-      }, imageReaderHandler)
-
-      relativeOrientation.observeForever{rotation ->
-      }
-    }
+  private fun startPreview(captureRequest: CaptureRequest.Builder, imageReader: ImageReader, session: CameraCaptureSession){
+    // This will keep sending the capture request as frequently as possible until the
+    // session is torn down or session.stopRepeating() is called
+    session.setRepeatingRequest(captureRequest.build(), null, cameraHandler)
+
+    //Setup listener for receiving the preview frames for processing
+    imageReader.setOnImageAvailableListener({
+      it.acquireNextImage()?.let {image ->
+        val rotation = relativeOrientation.value ?: 0
+        processingRunnable.setNextFrame(image, rotation)
+      }
+    }, imageReaderHandler)
   }
 
+  /**
+   * Update the camera preview with the changes in the capture request builder
+   *
+   * @param captureRequest the capture request builder to use for the session.
+   * @param session the configured camera capture session for the camera device.
+   *
+   * @throws Exception if the capture request could not be applied to the session.
+   *
+   * */
+  @Throws(Exception::class)
+  private fun updatePreview(captureRequest: CaptureRequest.Builder, session: CameraCaptureSession){
+    session.setRepeatingRequest(captureRequest.build(), null, cameraHandler)
   }
 
   /**
@@ -243,7 +248,9 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
    * resources of the underlying detector.
    */
   @Synchronized
+  @Throws(Exception::class)
   internal fun stop() {
+    Log.d(TAG, "Stop is called")
     processingRunnable.setActive(false)
     processingThread?.let {
       try {
@@ -256,20 +263,27 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
       processingThread = null
     }
 
+    // Remove the image reader buffer listener & orientation change observer, since they will no longer be in use.
+    imageReader?.let {
+      it.setOnImageAvailableListener(null, null)
+      imageReader = null
+    }
+    relativeOrientation.removeObserver(orientationObserver)
+
+    /* session?.let {
+      it.stopRepeating()
+      it.close()
+      session = null
+    }*/
+
     camera?.let {
-      it.stopPreview()
-      it.setPreviewCallbackWithBuffer(null)
-      try {
-        it.setPreviewDisplay(null)
-      } catch (e: Exception) {
-        Log.e(TAG, "Failed to clear camera preview: $e")
-      }
-      it.release()
+      it.close()
       camera = null
     }
 
-    // Release the reference to any image buffers, since these will no longer be in use.
-    bytesToByteBuffer.clear()
+    cameraThread.quitSafely()
+    imageReaderThread.quitSafely()
+
   }
 
   /** Stops the camera and releases the resources of the camera and underlying detector. */
@@ -289,10 +303,22 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     }
   }
 
-  fun updateFlashMode(flashMode: String) {
-    val parameters = camera?.parameters
-    parameters?.flashMode = flashMode
-    camera?.parameters = parameters
+  fun updateFlashMode(enabled: Boolean) {
+    val flashAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) as Boolean
+    if(flashAvailable){
+      /*if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+        cameraManager.setTorchMode(cameraId, enabled)
+      }
+      else{*/
+      session?.let {session ->
+        captureRequest?.let { captureRequest ->
+          captureRequest.set(CaptureRequest.FLASH_MODE,
+            if (enabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
+          updatePreview(captureRequest, session)
+        }
+      }
+      /*}*/
+    }
   }
 
   /**
@@ -392,30 +418,6 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     return sizePair
   }
 
-  /**
-   * Creates one buffer for the camera preview callback. The size of the buffer is based off of the
-   * camera preview size and the format of the camera image.
-   *
-   * @return a new preview buffer of the appropriate size for the current camera settings.
-   */
-  private fun createPreviewBuffer(previewSize: Size): ByteArray {
-    val bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT)
-    val sizeInBits = previewSize.height.toLong() * previewSize.width.toLong() * bitsPerPixel.toLong()
-    val bufferSize = ceil(sizeInBits / 8.0).toInt() + 1
-
-    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
-    // should guarantee that there will be an array to work with.
-    val byteArray = ByteArray(bufferSize)
-    val byteBuffer = ByteBuffer.wrap(byteArray)
-    check(!(!byteBuffer.hasArray() || !byteBuffer.array()!!.contentEquals(byteArray))) {
-      // This should never happen. If it does, then we wouldn't be passing the preview content to
-      // the underlying detector later.
-      "Failed to create valid buffer for camera source."
-    }
-
-    bytesToByteBuffer[byteArray] = byteBuffer
-    return byteArray
-  }
 
   /**
   * This runnable controls access to the underlying receiver, calling it to process frames when
   * available from the camera. This is designed to run detection on frames as fast as possible
   * (i.e., without unnecessary context switching or waiting on the next frame).
   *
   *
   * While detection is running on a frame, new frames may be received from the camera. As these
   * frames come in, the most recent frame is held onto as pending. As soon as detection and its
   * associated processing is done for the previous frame, detection on the most recently received
   * frame will immediately start on the same thread.
   */
-  private inner class FrameProcessingRunnable internal constructor() : Runnable {
+  private inner class FrameProcessingRunnable : Runnable {
 
     // This lock guards all of the member variables below.
     private val lock = Object()
@@ -439,7 +441,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     private var pendingFrameRotation: Int = 0
 
     /** Marks the runnable as active/not active. Signals any blocked threads to continue. */
-    internal fun setActive(active: Boolean) {
+    fun setActive(active: Boolean) {
      synchronized(lock) {
        this.active = active
        lock.notifyAll()
@@ -450,7 +452,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
    /**
     * Sets the frame data received from the camera. This adds the previous unused frame buffer (if
     * present) back to the camera, and keeps a pending reference to the frame data for future use.
     */
-    internal fun setNextFrame(image: Image, rotation: Int) {
+    fun setNextFrame(image: Image, rotation: Int) {
       synchronized(lock) {
         pendingFrame?.let {
           it.close()
@@ -512,13 +514,15 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
         try {
           synchronized(processorLock) {
             data?.let {
+              //Log.d(TAG, "Processing Next Frame ${it.width} x ${it.height}")
               frameProcessor?.process(it, pendingFrameRotation, graphicOverlay)
             }
           }
         } catch (t: Exception) {
           Log.e(TAG, "Exception thrown from receiver.", t)
         } finally {
-          data?.close()
+          //Let the processor close image as it's required until frame is processed
+          //data?.close()
         }
       }
     }
@@ -538,7 +542,6 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     private const val MAX_CAMERA_PREVIEW_WIDTH = 1300
     private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 640
     private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360
-    private const val REQUESTED_CAMERA_FPS = 30.0f
 
    /**
     * Selects the most suitable preview and picture size, given the display aspect ratio in landscape
@@ -546,7 +549,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     *
     *
     * It's firstly trying to pick the one that has closest aspect ratio to display view with its
-    * width be in the specified range [[.MIN_CAMERA_PREVIEW_WIDTH], [ ][.MAX_CAMERA_PREVIEW_WIDTH]]. If there're multiple candidates, choose the one having longest
+    * width be in the specified range [[.MIN_CAMERA_PREVIEW_WIDTH], [ ][.MAX_CAMERA_PREVIEW_WIDTH]]. If there are multiple candidates, choose the one having longest
     * width.
     *
     *
@@ -559,7 +562,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     * ratio. On some hardware, if you would only set the preview size, you will get a distorted
     * image.
     *
-    * @param camera the camera to select a preview size from
+    * @param characteristics the selected camera characteristics to select a preview size from
     * @return the selected preview and picture size pair
     */
    private fun selectSizePair(characteristics: CameraCharacteristics, displayAspectRatioInLandscape: Float): CameraSizePair? {
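
For context on selectSizePair above: the heart of the selection is a closest-aspect-ratio comparison over the sizes that generateValidPreviewSizeList returns. A simplified sketch, not the showcase's exact implementation:

    import android.util.Size
    import kotlin.math.abs

    // Sketch: choose the candidate size whose aspect ratio best matches the display.
    fun pickClosestAspectRatio(candidates: List<Size>, displayAspectRatio: Float): Size? =
        candidates.minByOrNull { size ->
            abs(size.width.toFloat() / size.height.toFloat() - displayAspectRatio)
        }
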
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt
index 995137573c..f0ec2dc6ce 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt
@@ -29,15 +29,16 @@ import java.io.IOException
 
 /** Preview the camera image in the screen. */
 class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) {
-
+
   private val surfaceView: SurfaceView = SurfaceView(context).apply {
     holder.addCallback(SurfaceCallback())
     addView(this)
   }
   private var graphicOverlay: GraphicOverlay? = null
   private var startRequested = false
+  private var startProcessing = false
   private var surfaceAvailable = false
-  private var cameraSource: CameraSource? = null
+  private var cameraSource: Camera2Source? = null
   private var cameraPreviewSize: Size? = null
 
   override fun onFinishInflate() {
@@ -45,13 +46,14 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) {
     graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay)
   }
 
-  @Throws(IOException::class)
-  fun start(cameraSource: CameraSource) {
+  @Throws(Exception::class)
+  fun start(cameraSource: Camera2Source) {
     this.cameraSource = cameraSource
     startRequested = true
     startIfReady()
   }
 
+  @Throws(Exception::class)
   fun stop() {
     cameraSource?.let {
       it.stop()
@@ -60,18 +62,34 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) {
     }
   }
 
-  @Throws(IOException::class)
+  @Throws(Exception::class)
   private fun startIfReady() {
-    if (startRequested && surfaceAvailable) {
-      cameraSource?.start(surfaceView.holder)
-      requestLayout()
-      graphicOverlay?.let { overlay ->
-        cameraSource?.let {
-          overlay.setCameraInfo(it)
+    if (startRequested && surfaceAvailable && !startProcessing) {
+      startProcessing = true
+      Log.d(TAG, "Starting camera")
+      cameraSource?.start(surfaceView.holder, object : CameraStartCallback{
+        override fun onSuccess() {
+          post {
+            /*requestLayout()
+            graphicOverlay?.let { overlay ->
+              cameraSource?.let {
+                overlay.setCameraInfo(it)
+              }
+              overlay.clear()
+            }*/
+            startRequested = false
+            startProcessing = false
+          }
         }
-        overlay.clear()
-      }
-      startRequested = false
+
+        override fun onFailure(error: Exception?) {
+          startRequested = false
+          startProcessing = false
+        }
+      })
     }
   }
@@ -88,7 +106,7 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) {
       } else {
         size.width.toFloat() / size.height
       }
-    } ?: (layoutWidth.toFloat() / layoutHeight.toFloat())
+    } ?: layoutWidth.toFloat() / layoutHeight.toFloat()
 
     // Match the width of the child view to its parent.
     val childHeight = (layoutWidth / previewSizeRatio).toInt()
@@ -119,7 +137,7 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) {
 
     try {
       startIfReady()
-    } catch (e: IOException) {
+    } catch (e: Exception) {
       Log.e(TAG, "Could not start camera source.", e)
     }
   }
@@ -129,7 +147,7 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) {
     surfaceAvailable = true
     try {
       startIfReady()
-    } catch (e: IOException) {
+    } catch (e: Exception) {
       Log.e(TAG, "Could not start camera source.", e)
     }
   }
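
The Frame2ProcessorBase changes below rely on a latest-frame-wins scheme: a newly arrived Image replaces any frame that detection has not picked up yet, and the stale one is closed so its buffer returns to the ImageReader. A condensed sketch of that scheme (field names follow the diff; this is not the full class):

    import android.media.Image

    // Sketch: hold at most one pending frame; stale frames are closed, never queued.
    class LatestFrameHolder {
        private var latestFrame: Image? = null

        @Synchronized
        fun offer(frame: Image) {
            latestFrame?.close() // release the stale frame's buffer back to the ImageReader
            latestFrame = frame
        }

        @Synchronized
        fun take(): Image? = latestFrame.also { latestFrame = null }
    }
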
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt
index f6ccee7f23..faf7b231b1 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt
@@ -55,6 +55,7 @@ abstract class Frame2ProcessorBase<T> : Frame2Processor {
 
   @Synchronized
   private fun processLatestFrame(graphicOverlay: GraphicOverlay) {
+    processingFrame?.close()
     processingFrame = latestFrame
     processingFrameRotation = latestFrameRotation
     latestFrame = null
@@ -62,15 +63,17 @@ abstract class Frame2ProcessorBase<T> : Frame2Processor {
     val frame = processingFrame ?: return
     val frameRotation = processingFrameRotation ?: return
     val image = InputImage.fromMediaImage(frame, frameRotation)
-    val startMs = SystemClock.elapsedRealtime()
     detectInImage(image)
       .addOnSuccessListener(executor) { results: T ->
-        Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}")
+        //Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}")
         this@Frame2ProcessorBase.onSuccess(Camera2InputInfo(frame, frameRotation), results, graphicOverlay)
         processLatestFrame(graphicOverlay)
       }
-      .addOnFailureListener(executor) { e -> OnFailureListener { this@Frame2ProcessorBase.onFailure(it) } }
+      .addOnFailureListener(executor) { e ->
+        Log.d(TAG, "Detect In Image Failure: ${e.message}")
+        this@Frame2ProcessorBase.onFailure(e)
+      }
   }
 
   override fun stop() {
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt
index 4b18a19279..9fbd32c21e 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt
@@ -78,6 +78,7 @@ class GraphicOverlay(context: Context, attrs: AttributeSet) : View(context, attrs) {
    * Sets the camera attributes for size and facing direction, which informs how to transform image
    * coordinates later.
    */
+  @Deprecated("This method is deprecated. Use setCameraInfo(cameraSource: Camera2Source) instead")
   fun setCameraInfo(cameraSource: CameraSource) {
     val previewSize = cameraSource.previewSize ?: return
     if (Utils.isPortraitMode(context)) {
@@ -90,6 +91,22 @@ class GraphicOverlay(context: Context, attrs: AttributeSet) : View(context, attrs) {
     }
   }
 
+  /**
+   * Sets the camera attributes for size and facing direction, which informs how to transform image
+   * coordinates later.
+   */
+  fun setCameraInfo(cameraSource: Camera2Source) {
+    val previewSize = cameraSource.previewSize ?: return
+    /*if (Utils.isPortraitMode(context)) {
+      // Swap width and height when in portrait, since camera's natural orientation is landscape.
+      previewWidth = previewSize.height
+      previewHeight = previewSize.width
+    } else {*/
+      previewWidth = previewSize.width
+      previewHeight = previewSize.height
+    /*}*/
+  }
+
   fun translateX(x: Float): Float = x * widthScaleFactor
   fun translateY(y: Float): Float = y * heightScaleFactor
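
The translate helpers at the end of the overlay diff assume widthScaleFactor and heightScaleFactor were derived from the preview size that setCameraInfo recorded. Roughly, and as a sketch of the usual computation in these samples rather than a verbatim excerpt:

    // Sketch: image-to-view scale factors from the recorded preview size.
    fun scaleFactors(viewWidth: Int, viewHeight: Int, previewWidth: Int, previewHeight: Int): Pair<Float, Float> {
        val widthScaleFactor = viewWidth.toFloat() / previewWidth.toFloat()
        val heightScaleFactor = viewHeight.toFloat() / previewHeight.toFloat()
        return widthScaleFactor to heightScaleFactor
    }
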
diff --git a/android/material-showcase/app/src/main/res/layout/activity_live_barcode.xml b/android/material-showcase/app/src/main/res/layout/activity_live_barcode.xml
index e51b915c91..aed19dc8da 100644
--- a/android/material-showcase/app/src/main/res/layout/activity_live_barcode.xml
+++ b/android/material-showcase/app/src/main/res/layout/activity_live_barcode.xml
@@ -5,14 +5,14 @@
     android:layout_height="match_parent"
     android:keepScreenOn="true">
 
-    <com.google.mlkit.md.camera.CameraSourcePreview
+    <com.google.mlkit.md.camera.Camera2SourcePreview

Date: Sun, 30 Jul 2023 14:38:24 +0530
Subject: [PATCH 04/18] Gradle library updated to version 7.2

---
 .../gradle/wrapper/gradle-wrapper.jar         | Bin 54329 -> 59536 bytes
 .../gradle/wrapper/gradle-wrapper.properties  |   3 +-
 android/material-showcase/gradlew             | 282 +++++++++++-------
 android/material-showcase/gradlew.bat         |  43 +--
 4 files changed, 197 insertions(+), 131 deletions(-)

diff --git a/android/material-showcase/gradle/wrapper/gradle-wrapper.jar b/android/material-showcase/gradle/wrapper/gradle-wrapper.jar
index f6b961fd5a86aa5fbfe90f707c3138408be7c718..7454180f2ae8848c63b8b4dea2cb829da983f2fa 100644
GIT binary patch
literal 59536
[base85-encoded binary data omitted]
z^XpIt|0ibmk9RtMmlUd7#Ap3Q!q9N4atQy)TmrhrFhfx1DAN`^vq@Q_SRl|V z#lU<~n67$mT)NvHh`%als+G-)x1`Y%4Bp*6Un5Ri9h=_Db zA-AdP!f>f0m@~>7X#uBM?diI@)Egjuz@jXKvm zJo+==juc9_<;CqeRaU9_Mz@;3e=E4=6TK+c`|uu#pIqhSyNm`G(X)&)B`8q0RBv#> z`gGlw(Q=1Xmf55VHj%C#^1lpc>LY8kfA@|rlC1EA<1#`iuyNO z(=;irt{_&K=i4)^x%;U(Xv<)+o=dczC5H3W~+e|f~{*ucxj@{Yi-cw^MqYr3fN zF5D+~!wd$#al?UfMnz(@K#wn`_5na@rRr8XqN@&M&FGEC@`+OEv}sI1hw>Up0qAWf zL#e4~&oM;TVfjRE+10B_gFlLEP9?Q-dARr3xi6nQqnw>k-S;~b z;!0s2VS4}W8b&pGuK=7im+t(`nz@FnT#VD|!)eQNp-W6)@>aA+j~K*H{$G`y2|QHY z|Hmy+CR@#jWY4~)lr1qBJB_RfHJFfP<}pK5(#ZZGSqcpyS&}01LnTWk5fzmXMGHkJ zTP6L^B+uj;lmB_W<~4=${+v0>z31M!-_O@o-O9GyW)j_mjx}!0@br_LE-7SIuPP84 z;5=O(U*g_um0tyG|61N@d9lEuOeiRd+#NY^{nd5;-CVlw&Ap7J?qwM^?E29wvS}2d zbzar4Fz&RSR(-|s!Z6+za&Z zY#D<5q_JUktIzvL0)yq_kLWG6DO{ri=?c!y!f(Dk%G{8)k`Gym%j#!OgXVDD3;$&v@qy#ISJfp=Vm>pls@9-mapVQChAHHd-x+OGx)(*Yr zC1qDUTZ6mM(b_hi!TuFF2k#8uI2;kD70AQ&di$L*4P*Y-@p`jdm%_c3f)XhYD^6M8&#Y$ZpzQMcR|6nsH>b=*R_Von!$BTRj7yGCXokoAQ z&ANvx0-Epw`QIEPgI(^cS2f(Y85yV@ygI{ewyv5Frng)e}KCZF7JbR(&W618_dcEh(#+^zZFY;o<815<5sOHQdeax9_!PyM&;{P zkBa5xymca0#)c#tke@3KNEM8a_mT&1gm;p&&JlMGH(cL(b)BckgMQ^9&vRwj!~3@l zY?L5}=Jzr080OGKb|y`ee(+`flQg|!lo6>=H)X4`$Gz~hLmu2a%kYW_Uu8x09Pa0J zKZ`E$BKJ=2GPj_3l*TEcZ*uYRr<*J^#5pILTT;k_cgto1ZL-%slyc16J~OH-(RgDA z%;EjEnoUkZ&acS{Q8`{i6T5^nywgqQI5bDIymoa7CSZG|WWVk>GM9)zy*bNih|QIm z%0+(Nnc*a_xo;$=!HQYaapLms>J1ToyjtFByY`C2H1wT#178#4+|{H0BBqtCdd$L% z_3Hc60j@{t9~MjM@LBalR&6@>B;9?r<7J~F+WXyYu*y3?px*=8MAK@EA+jRX8{CG?GI-< z54?Dc9CAh>QTAvyOEm0^+x;r2BWX|{3$Y7)L5l*qVE*y0`7J>l2wCmW zL1?|a`pJ-l{fb_N;R(Z9UMiSj6pQjOvQ^%DvhIJF!+Th7jO2~1f1N+(-TyCFYQZYw z4)>7caf^Ki_KJ^Zx2JUb z&$3zJy!*+rCV4%jqwyuNY3j1ZEiltS0xTzd+=itTb;IPYpaf?8Y+RSdVdpacB(bVQ zC(JupLfFp8y43%PMj2}T|VS@%LVp>hv4Y!RPMF?pp8U_$xCJ)S zQx!69>bphNTIb9yn*_yfj{N%bY)t{L1cs8<8|!f$;UQ*}IN=2<6lA;x^(`8t?;+ST zh)z4qeYYgZkIy{$4x28O-pugO&gauRh3;lti9)9Pvw+^)0!h~%m&8Q!AKX%urEMnl z?yEz?g#ODn$UM`+Q#$Q!6|zsq_`dLO5YK-6bJM6ya>}H+vnW^h?o$z;V&wvuM$dR& zeEq;uUUh$XR`TWeC$$c&Jjau2it3#%J-y}Qm>nW*s?En?R&6w@sDXMEr#8~$=b(gk zwDC3)NtAP;M2BW_lL^5ShpK$D%@|BnD{=!Tq)o(5@z3i7Z){} zGr}Exom_qDO{kAVkZ*MbLNHE666Kina#D{&>Jy%~w7yX$oj;cYCd^p9zy z8*+wgSEcj$4{WxKmCF(5o7U4jqwEvO&dm1H#7z}%VXAbW&W24v-tS6N3}qrm1OnE)fUkoE8yMMn9S$?IswS88tQWm4#Oid#ckgr6 zRtHm!mfNl-`d>O*1~d7%;~n+{Rph6BBy^95zqI{K((E!iFQ+h*C3EsbxNo_aRm5gj zKYug($r*Q#W9`p%Bf{bi6;IY0v`pB^^qu)gbg9QHQ7 zWBj(a1YSu)~2RK8Pi#C>{DMlrqFb9e_RehEHyI{n?e3vL_}L>kYJC z_ly$$)zFi*SFyNrnOt(B*7E$??s67EO%DgoZL2XNk8iVx~X_)o++4oaK1M|ou73vA0K^503j@uuVmLcHH4ya-kOIDfM%5%(E z+Xpt~#7y2!KB&)PoyCA+$~DXqxPxxALy!g-O?<9+9KTk4Pgq4AIdUkl`1<1#j^cJg zgU3`0hkHj_jxV>`Y~%LAZl^3o0}`Sm@iw7kwff{M%VwtN)|~!p{AsfA6vB5UolF~d zHWS%*uBDt<9y!9v2Xe|au&1j&iR1HXCdyCjxSgG*L{wmTD4(NQ=mFjpa~xooc6kju z`~+d{j7$h-;HAB04H!Zscu^hZffL#9!p$)9>sRI|Yovm)g@F>ZnosF2EgkU3ln0bR zTA}|+E(tt)!SG)-bEJi_0m{l+(cAz^pi}`9=~n?y&;2eG;d9{M6nj>BHGn(KA2n|O zt}$=FPq!j`p&kQ8>cirSzkU0c08%8{^Qyqi-w2LoO8)^E7;;I1;HQ6B$u0nNaX2CY zSmfi)F`m94zL8>#zu;8|{aBui@RzRKBlP1&mfFxEC@%cjl?NBs`cr^nm){>;$g?rhKr$AO&6qV_Wbn^}5tfFBry^e1`%du2~o zs$~dN;S_#%iwwA_QvmMjh%Qo?0?rR~6liyN5Xmej8(*V9ym*T`xAhHih-v$7U}8=dfXi2i*aAB!xM(Xekg*ix@r|ymDw*{*s0?dlVys2e)z62u1 z+k3esbJE=-P5S$&KdFp+2H7_2e=}OKDrf( z9-207?6$@f4m4B+9E*e((Y89!q?zH|mz_vM>kp*HGXldO0Hg#!EtFhRuOm$u8e~a9 z5(roy7m$Kh+zjW6@zw{&20u?1f2uP&boD}$#Zy)4o&T;vyBoqFiF2t;*g=|1=)PxB z8eM3Mp=l_obbc?I^xyLz?4Y1YDWPa+nm;O<$Cn;@ane616`J9OO2r=rZr{I_Kizyc zP#^^WCdIEp*()rRT+*YZK>V@^Zs=ht32x>Kwe zab)@ZEffz;VM4{XA6e421^h~`ji5r%)B{wZu#hD}f3$y@L0JV9f3g{-RK!A?vBUA}${YF(vO4)@`6f1 z-A|}e#LN{)(eXloDnX4Vs7eH|<@{r#LodP@Nz--$Dg_Par%DCpu2>2jUnqy~|J?eZ 
zBG4FVsz_A+ibdwv>mLp>P!(t}E>$JGaK$R~;fb{O3($y1ssQQo|5M;^JqC?7qe|hg zu0ZOqeFcp?qVn&Qu7FQJ4hcFi&|nR!*j)MF#b}QO^lN%5)4p*D^H+B){n8%VPUzi! zDihoGcP71a6!ab`l^hK&*dYrVYzJ0)#}xVrp!e;lI!+x+bfCN0KXwUAPU9@#l7@0& QuEJmfE|#`Dqx|px0L@K;Y5)KL literal 54329 zcmagFV|ZrKvM!pAZQHhO+qP}9lTNj?q^^Y^VFp)SH8qbSJ)2BQ2giqr}t zFG7D6)c?v~^Z#E_K}1nTQbJ9gQ9<%vVRAxVj)8FwL5_iTdUB>&m3fhE=kRWl;g`&m z!W5kh{WsV%fO*%je&j+Lv4xxK~zsEYQls$Q-p&dwID|A)!7uWtJF-=Tm1{V@#x*+kUI$=%KUuf2ka zjiZ{oiL1MXE2EjciJM!jrjFNwCh`~hL>iemrqwqnX?T*MX;U>>8yRcZb{Oy+VKZos zLiFKYPw=LcaaQt8tj=eoo3-@bG_342HQ%?jpgAE?KCLEHC+DmjxAfJ%Og^$dpC8Xw zAcp-)tfJm}BPNq_+6m4gBgBm3+CvmL>4|$2N$^Bz7W(}fz1?U-u;nE`+9`KCLuqg} zwNstNM!J4Uw|78&Y9~9>MLf56to!@qGkJw5Thx%zkzj%Ek9Nn1QA@8NBXbwyWC>9H z#EPwjMNYPigE>*Ofz)HfTF&%PFj$U6mCe-AFw$U%-L?~-+nSXHHKkdgC5KJRTF}`G zE_HNdrE}S0zf4j{r_f-V2imSqW?}3w-4=f@o@-q+cZgaAbZ((hn))@|eWWhcT2pLpTpL!;_5*vM=sRL8 zqU##{U#lJKuyqW^X$ETU5ETeEVzhU|1m1750#f}38_5N9)B_2|v@1hUu=Kt7-@dhA zq_`OMgW01n`%1dB*}C)qxC8q;?zPeF_r;>}%JYmlER_1CUbKa07+=TV45~symC*g8 zW-8(gag#cAOuM0B1xG8eTp5HGVLE}+gYTmK=`XVVV*U!>H`~j4+ROIQ+NkN$LY>h4 zqpwdeE_@AX@PL};e5vTn`Ro(EjHVf$;^oiA%@IBQq>R7_D>m2D4OwwEepkg}R_k*M zM-o;+P27087eb+%*+6vWFCo9UEGw>t&WI17Pe7QVuoAoGHdJ(TEQNlJOqnjZ8adCb zI`}op16D@v7UOEo%8E-~m?c8FL1utPYlg@m$q@q7%mQ4?OK1h%ODjTjFvqd!C z-PI?8qX8{a@6d&Lb_X+hKxCImb*3GFemm?W_du5_&EqRq!+H?5#xiX#w$eLti-?E$;Dhu`{R(o>LzM4CjO>ICf z&DMfES#FW7npnbcuqREgjPQM#gs6h>`av_oEWwOJZ2i2|D|0~pYd#WazE2Bbsa}X@ zu;(9fi~%!VcjK6)?_wMAW-YXJAR{QHxrD5g(ou9mR6LPSA4BRG1QSZT6A?kelP_g- zH(JQjLc!`H4N=oLw=f3{+WmPA*s8QEeEUf6Vg}@!xwnsnR0bl~^2GSa5vb!Yl&4!> zWb|KQUsC$lT=3A|7vM9+d;mq=@L%uWKwXiO9}a~gP4s_4Yohc!fKEgV7WbVo>2ITbE*i`a|V!^p@~^<={#?Gz57 zyPWeM2@p>D*FW#W5Q`1`#5NW62XduP1XNO(bhg&cX`-LYZa|m-**bu|>}S;3)eP8_ zpNTnTfm8 ze+7wDH3KJ95p)5tlwk`S7mbD`SqHnYD*6`;gpp8VdHDz%RR_~I_Ar>5)vE-Pgu7^Y z|9Px+>pi3!DV%E%4N;ii0U3VBd2ZJNUY1YC^-e+{DYq+l@cGtmu(H#Oh%ibUBOd?C z{y5jW3v=0eV0r@qMLgv1JjZC|cZ9l9Q)k1lLgm))UR@#FrJd>w^`+iy$c9F@ic-|q zVHe@S2UAnc5VY_U4253QJxm&Ip!XKP8WNcnx9^cQ;KH6PlW8%pSihSH2(@{2m_o+m zr((MvBja2ctg0d0&U5XTD;5?d?h%JcRJp{_1BQW1xu&BrA3(a4Fh9hon-ly$pyeHq zG&;6q?m%NJ36K1Sq_=fdP(4f{Hop;_G_(i?sPzvB zDM}>*(uOsY0I1j^{$yn3#U(;B*g4cy$-1DTOkh3P!LQ;lJlP%jY8}Nya=h8$XD~%Y zbV&HJ%eCD9nui-0cw!+n`V~p6VCRqh5fRX z8`GbdZ@73r7~myQLBW%db;+BI?c-a>Y)m-FW~M=1^|<21_Sh9RT3iGbO{o-hpN%d6 z7%++#WekoBOP^d0$$|5npPe>u3PLvX_gjH2x(?{&z{jJ2tAOWTznPxv-pAv<*V7r$ z6&glt>7CAClWz6FEi3bToz-soY^{ScrjwVPV51=>n->c(NJngMj6TyHty`bfkF1hc zkJS%A@cL~QV0-aK4>Id!9dh7>0IV;1J9(myDO+gv76L3NLMUm9XyPauvNu$S<)-|F zZS}(kK_WnB)Cl`U?jsdYfAV4nrgzIF@+%1U8$poW&h^c6>kCx3;||fS1_7JvQT~CV zQ8Js+!p)3oW>Df(-}uqC`Tcd%E7GdJ0p}kYj5j8NKMp(KUs9u7?jQ94C)}0rba($~ zqyBx$(1ae^HEDG`Zc@-rXk1cqc7v0wibOR4qpgRDt#>-*8N3P;uKV0CgJE2SP>#8h z=+;i_CGlv+B^+$5a}SicVaSeaNn29K`C&=}`=#Nj&WJP9Xhz4mVa<+yP6hkrq1vo= z1rX4qg8dc4pmEvq%NAkpMK>mf2g?tg_1k2%v}<3`$6~Wlq@ItJ*PhHPoEh1Yi>v57 z4k0JMO)*=S`tKvR5gb-(VTEo>5Y>DZJZzgR+j6{Y`kd|jCVrg!>2hVjz({kZR z`dLlKhoqT!aI8=S+fVp(5*Dn6RrbpyO~0+?fy;bm$0jmTN|t5i6rxqr4=O}dY+ROd zo9Et|x}!u*xi~>-y>!M^+f&jc;IAsGiM_^}+4|pHRn{LThFFpD{bZ|TA*wcGm}XV^ zr*C6~@^5X-*R%FrHIgo-hJTBcyQ|3QEj+cSqp#>&t`ZzB?cXM6S(lRQw$I2?m5=wd z78ki`R?%;o%VUhXH?Z#(uwAn9$m`npJ=cA+lHGk@T7qq_M6Zoy1Lm9E0UUysN)I_x zW__OAqvku^>`J&CB=ie@yNWsaFmem}#L3T(x?a`oZ+$;3O-icj2(5z72Hnj=9Z0w% z<2#q-R=>hig*(t0^v)eGq2DHC%GymE-_j1WwBVGoU=GORGjtaqr0BNigOCqyt;O(S zKG+DoBsZU~okF<7ahjS}bzwXxbAxFfQAk&O@>LsZMsZ`?N?|CDWM(vOm%B3CBPC3o z%2t@%H$fwur}SSnckUm0-k)mOtht`?nwsDz=2#v=RBPGg39i#%odKq{K^;bTD!6A9 zskz$}t)sU^=a#jLZP@I=bPo?f-L}wpMs{Tc!m7-bi!Ldqj3EA~V;4(dltJmTXqH0r z%HAWKGutEc9vOo3P6Q;JdC^YTnby->VZ6&X8f{obffZ??1(cm&L2h7q)*w**+sE6dG*;(H|_Q!WxU{g)CeoT 
z(KY&bv!Usc|m+Fqfmk;h&RNF|LWuNZ!+DdX*L=s-=_iH=@i` z?Z+Okq^cFO4}_n|G*!)Wl_i%qiMBaH8(WuXtgI7EO=M>=i_+;MDjf3aY~6S9w0K zUuDO7O5Ta6+k40~xh~)D{=L&?Y0?c$s9cw*Ufe18)zzk%#ZY>Tr^|e%8KPb0ht`b( zuP@8#Ox@nQIqz9}AbW0RzE`Cf>39bOWz5N3qzS}ocxI=o$W|(nD~@EhW13Rj5nAp; zu2obEJa=kGC*#3=MkdkWy_%RKcN=?g$7!AZ8vBYKr$ePY(8aIQ&yRPlQ=mudv#q$q z4%WzAx=B{i)UdLFx4os?rZp6poShD7Vc&mSD@RdBJ=_m^&OlkEE1DFU@csgKcBifJ zz4N7+XEJhYzzO=86 z#%eBQZ$Nsf2+X0XPHUNmg#(sNt^NW1Y0|M(${e<0kW6f2q5M!2YE|hSEQ*X-%qo(V zHaFwyGZ0on=I{=fhe<=zo{=Og-_(to3?cvL4m6PymtNsdDINsBh8m>a%!5o3s(en) z=1I z6O+YNertC|OFNqd6P=$gMyvmfa`w~p9*gKDESFqNBy(~Zw3TFDYh}$iudn)9HxPBi zdokK@o~nu?%imcURr5Y~?6oo_JBe}t|pU5qjai|#JDyG=i^V~7+a{dEnO<(y>ahND#_X_fcEBNiZ)uc&%1HVtx8Ts z*H_Btvx^IhkfOB#{szN*n6;y05A>3eARDXslaE>tnLa>+`V&cgho?ED+&vv5KJszf zG4@G;7i;4_bVvZ>!mli3j7~tPgybF5|J6=Lt`u$D%X0l}#iY9nOXH@(%FFJLtzb%p zzHfABnSs;v-9(&nzbZytLiqqDIWzn>JQDk#JULcE5CyPq_m#4QV!}3421haQ+LcfO*>r;rg6K|r#5Sh|y@h1ao%Cl)t*u`4 zMTP!deC?aL7uTxm5^nUv#q2vS-5QbBKP|drbDXS%erB>fYM84Kpk^au99-BQBZR z7CDynflrIAi&ahza+kUryju5LR_}-Z27g)jqOc(!Lx9y)e z{cYc&_r947s9pteaa4}dc|!$$N9+M38sUr7h(%@Ehq`4HJtTpA>B8CLNO__@%(F5d z`SmX5jbux6i#qc}xOhumzbAELh*Mfr2SW99=WNOZRZgoCU4A2|4i|ZVFQt6qEhH#B zK_9G;&h*LO6tB`5dXRSBF0hq0tk{2q__aCKXYkP#9n^)@cq}`&Lo)1KM{W+>5mSed zKp~=}$p7>~nK@va`vN{mYzWN1(tE=u2BZhga5(VtPKk(*TvE&zmn5vSbjo zZLVobTl%;t@6;4SsZ>5+U-XEGUZGG;+~|V(pE&qqrp_f~{_1h@5ZrNETqe{bt9ioZ z#Qn~gWCH!t#Ha^n&fT2?{`}D@s4?9kXj;E;lWV9Zw8_4yM0Qg-6YSsKgvQ*fF{#Pq z{=(nyV>#*`RloBVCs;Lp*R1PBIQOY=EK4CQa*BD0MsYcg=opP?8;xYQDSAJBeJpw5 zPBc_Ft9?;<0?pBhCmOtWU*pN*;CkjJ_}qVic`}V@$TwFi15!mF1*m2wVX+>5p%(+R zQ~JUW*zWkalde{90@2v+oVlkxOZFihE&ZJ){c?hX3L2@R7jk*xjYtHi=}qb+4B(XJ z$gYcNudR~4Kz_WRq8eS((>ALWCO)&R-MXE+YxDn9V#X{_H@j616<|P(8h(7z?q*r+ zmpqR#7+g$cT@e&(%_|ipI&A%9+47%30TLY(yuf&*knx1wNx|%*H^;YB%ftt%5>QM= z^i;*6_KTSRzQm%qz*>cK&EISvF^ovbS4|R%)zKhTH_2K>jP3mBGn5{95&G9^a#4|K zv+!>fIsR8z{^x4)FIr*cYT@Q4Z{y}};rLHL+atCgHbfX*;+k&37DIgENn&=k(*lKD zG;uL-KAdLn*JQ?@r6Q!0V$xXP=J2i~;_+i3|F;_En;oAMG|I-RX#FwnmU&G}w`7R{ z788CrR-g1DW4h_`&$Z`ctN~{A)Hv_-Bl!%+pfif8wN32rMD zJDs$eVWBYQx1&2sCdB0!vU5~uf)=vy*{}t{2VBpcz<+~h0wb7F3?V^44*&83Z2#F` z32!rd4>uc63rQP$3lTH3zb-47IGR}f)8kZ4JvX#toIpXH`L%NnPDE~$QI1)0)|HS4 zVcITo$$oWWwCN@E-5h>N?Hua!N9CYb6f8vTFd>h3q5Jg-lCI6y%vu{Z_Uf z$MU{{^o~;nD_@m2|E{J)q;|BK7rx%`m``+OqZAqAVj-Dy+pD4-S3xK?($>wn5bi90CFAQ+ACd;&m6DQB8_o zjAq^=eUYc1o{#+p+ zn;K<)Pn*4u742P!;H^E3^Qu%2dM{2slouc$AN_3V^M7H_KY3H)#n7qd5_p~Za7zAj|s9{l)RdbV9e||_67`#Tu*c<8!I=zb@ z(MSvQ9;Wrkq6d)!9afh+G`!f$Ip!F<4ADdc*OY-y7BZMsau%y?EN6*hW4mOF%Q~bw z2==Z3^~?q<1GTeS>xGN-?CHZ7a#M4kDL zQxQr~1ZMzCSKFK5+32C%+C1kE#(2L=15AR!er7GKbp?Xd1qkkGipx5Q~FI-6zt< z*PTpeVI)Ngnnyaz5noIIgNZtb4bQdKG{Bs~&tf)?nM$a;7>r36djllw%hQxeCXeW^ z(i6@TEIuxD<2ulwLTt|&gZP%Ei+l!(%p5Yij6U(H#HMkqM8U$@OKB|5@vUiuY^d6X zW}fP3;Kps6051OEO(|JzmVU6SX(8q>*yf*x5QoxDK={PH^F?!VCzES_Qs>()_y|jg6LJlJWp;L zKM*g5DK7>W_*uv}{0WUB0>MHZ#oJZmO!b3MjEc}VhsLD~;E-qNNd?x7Q6~v zR=0$u>Zc2Xr}>x_5$-s#l!oz6I>W?lw;m9Ae{Tf9eMX;TI-Wf_mZ6sVrMnY#F}cDd z%CV*}fDsXUF7Vbw>PuDaGhu631+3|{xp<@Kl|%WxU+vuLlcrklMC!Aq+7n~I3cmQ! 
z`e3cA!XUEGdEPSu``&lZEKD1IKO(-VGvcnSc153m(i!8ohi`)N2n>U_BemYJ`uY>8B*Epj!oXRLV}XK}>D*^DHQ7?NY*&LJ9VSo`Ogi9J zGa;clWI8vIQqkngv2>xKd91K>?0`Sw;E&TMg&6dcd20|FcTsnUT7Yn{oI5V4@Ow~m zz#k~8TM!A9L7T!|colrC0P2WKZW7PNj_X4MfESbt<-soq*0LzShZ}fyUx!(xIIDwx zRHt^_GAWe0-Vm~bDZ(}XG%E+`XhKpPlMBo*5q_z$BGxYef8O!ToS8aT8pmjbPq)nV z%x*PF5ZuSHRJqJ!`5<4xC*xb2vC?7u1iljB_*iUGl6+yPyjn?F?GOF2_KW&gOkJ?w z3e^qc-te;zez`H$rsUCE0<@7PKGW?7sT1SPYWId|FJ8H`uEdNu4YJjre`8F*D}6Wh z|FQ`xf7yiphHIAkU&OYCn}w^ilY@o4larl?^M7&8YI;hzBIsX|i3UrLsx{QDKwCX< zy;a>yjfJ6!sz`NcVi+a!Fqk^VE^{6G53L?@Tif|j!3QZ0fk9QeUq8CWI;OmO-Hs+F zuZ4sHLA3{}LR2Qlyo+{d@?;`tpp6YB^BMoJt?&MHFY!JQwoa0nTSD+#Ku^4b{5SZVFwU9<~APYbaLO zu~Z)nS#dxI-5lmS-Bnw!(u15by(80LlC@|ynj{TzW)XcspC*}z0~8VRZq>#Z49G`I zgl|C#H&=}n-ajxfo{=pxPV(L*7g}gHET9b*s=cGV7VFa<;Htgjk>KyW@S!|z`lR1( zGSYkEl&@-bZ*d2WQ~hw3NpP=YNHF^XC{TMG$Gn+{b6pZn+5=<()>C!N^jncl0w6BJ zdHdnmSEGK5BlMeZD!v4t5m7ct7{k~$1Ie3GLFoHjAH*b?++s<|=yTF+^I&jT#zuMx z)MLhU+;LFk8bse|_{j+d*a=&cm2}M?*arjBPnfPgLwv)86D$6L zLJ0wPul7IenMvVAK$z^q5<^!)7aI|<&GGEbOr=E;UmGOIa}yO~EIr5xWU_(ol$&fa zR5E(2vB?S3EvJglTXdU#@qfDbCYs#82Yo^aZN6`{Ex#M)easBTe_J8utXu(fY1j|R z9o(sQbj$bKU{IjyhosYahY{63>}$9_+hWxB3j}VQkJ@2$D@vpeRSldU?&7I;qd2MF zSYmJ>zA(@N_iK}m*AMPIJG#Y&1KR)6`LJ83qg~`Do3v^B0>fU&wUx(qefuTgzFED{sJ65!iw{F2}1fQ3= ziFIP{kezQxmlx-!yo+sC4PEtG#K=5VM9YIN0z9~c4XTX?*4e@m;hFM!zVo>A`#566 z>f&3g94lJ{r)QJ5m7Xe3SLau_lOpL;A($wsjHR`;xTXgIiZ#o&vt~ zGR6KdU$FFbLfZCC3AEu$b`tj!9XgOGLSV=QPIYW zjI!hSP#?8pn0@ezuenOzoka8!8~jXTbiJ6+ZuItsWW03uzASFyn*zV2kIgPFR$Yzm zE<$cZlF>R8?Nr2_i?KiripBc+TGgJvG@vRTY2o?(_Di}D30!k&CT`>+7ry2!!iC*X z<@=U0_C#16=PN7bB39w+zPwDOHX}h20Ap);dx}kjXX0-QkRk=cr};GYsjSvyLZa-t zzHONWddi*)RDUH@RTAsGB_#&O+QJaaL+H<<9LLSE+nB@eGF1fALwjVOl8X_sdOYme z0lk!X=S(@25=TZHR7LlPp}fY~yNeThMIjD}pd9+q=j<_inh0$>mIzWVY+Z9p<{D^#0Xk+b_@eNSiR8;KzSZ#7lUsk~NGMcB8C2c=m2l5paHPq`q{S(kdA7Z1a zyfk2Y;w?^t`?@yC5Pz9&pzo}Hc#}mLgDmhKV|PJ3lKOY(Km@Fi2AV~CuET*YfUi}u zfInZnqDX(<#vaS<^fszuR=l)AbqG{}9{rnyx?PbZz3Pyu!eSJK`uwkJU!ORQXy4x83r!PNgOyD33}}L=>xX_93l6njNTuqL8J{l%*3FVn3MG4&Fv*`lBXZ z?=;kn6HTT^#SrPX-N)4EZiIZI!0ByXTWy;;J-Tht{jq1mjh`DSy7yGjHxIaY%*sTx zuy9#9CqE#qi>1misx=KRWm=qx4rk|}vd+LMY3M`ow8)}m$3Ggv&)Ri*ON+}<^P%T5 z_7JPVPfdM=Pv-oH<tecoE}(0O7|YZc*d8`Uv_M*3Rzv7$yZnJE6N_W=AQ3_BgU_TjA_T?a)U1csCmJ&YqMp-lJe`y6>N zt++Bi;ZMOD%%1c&-Q;bKsYg!SmS^#J@8UFY|G3!rtyaTFb!5@e(@l?1t(87ln8rG? z--$1)YC~vWnXiW3GXm`FNSyzu!m$qT=Eldf$sMl#PEfGmzQs^oUd=GIQfj(X=}dw+ zT*oa0*oS%@cLgvB&PKIQ=Ok?>x#c#dC#sQifgMwtAG^l3D9nIg(Zqi;D%807TtUUCL3_;kjyte#cAg?S%e4S2W>9^A(uy8Ss0Tc++ZTjJw1 z&Em2g!3lo@LlDyri(P^I8BPpn$RE7n*q9Q-c^>rfOMM6Pd5671I=ZBjAvpj8oIi$! 
zl0exNl(>NIiQpX~FRS9UgK|0l#s@#)p4?^?XAz}Gjb1?4Qe4?j&cL$C8u}n)?A@YC zfmbSM`Hl5pQFwv$CQBF=_$Sq zxsV?BHI5bGZTk?B6B&KLdIN-40S426X3j_|ceLla*M3}3gx3(_7MVY1++4mzhH#7# zD>2gTHy*%i$~}mqc#gK83288SKp@y3wz1L_e8fF$Rb}ex+`(h)j}%~Ld^3DUZkgez zOUNy^%>>HHE|-y$V@B}-M|_{h!vXpk01xaD%{l{oQ|~+^>rR*rv9iQen5t?{BHg|% zR`;S|KtUb!X<22RTBA4AAUM6#M?=w5VY-hEV)b`!y1^mPNEoy2K)a>OyA?Q~Q*&(O zRzQI~y_W=IPi?-OJX*&&8dvY0zWM2%yXdFI!D-n@6FsG)pEYdJbuA`g4yy;qrgR?G z8Mj7gv1oiWq)+_$GqqQ$(ZM@#|0j7})=#$S&hZwdoijFI4aCFLVI3tMH5fLreZ;KD zqA`)0l~D2tuIBYOy+LGw&hJ5OyE+@cnZ0L5+;yo2pIMdt@4$r^5Y!x7nHs{@>|W(MzJjATyWGNwZ^4j+EPU0RpAl-oTM@u{lx*i0^yyWPfHt6QwPvYpk9xFMWfBFt!+Gu6TlAmr zeQ#PX71vzN*_-xh&__N`IXv6`>CgV#eA_%e@7wjgkj8jlKzO~Ic6g$cT`^W{R{606 zCDP~+NVZ6DMO$jhL~#+!g*$T!XW63#(ngDn#Qwy71yj^gazS{e;3jGRM0HedGD@pt z?(ln3pCUA(ekqAvvnKy0G@?-|-dh=eS%4Civ&c}s%wF@0K5Bltaq^2Os1n6Z3%?-Q zAlC4goQ&vK6TpgtzkHVt*1!tBYt-`|5HLV1V7*#45Vb+GACuU+QB&hZ=N_flPy0TY zR^HIrdskB#<$aU;HY(K{a3(OQa$0<9qH(oa)lg@Uf>M5g2W0U5 zk!JSlhrw8quBx9A>RJ6}=;W&wt@2E$7J=9SVHsdC?K(L(KACb#z)@C$xXD8^!7|uv zZh$6fkq)aoD}^79VqdJ!Nz-8$IrU(_-&^cHBI;4 z^$B+1aPe|LG)C55LjP;jab{dTf$0~xbXS9!!QdcmDYLbL^jvxu2y*qnx2%jbL%rB z{aP85qBJe#(&O~Prk%IJARcdEypZ)vah%ZZ%;Zk{eW(U)Bx7VlzgOi8)x z`rh4l`@l_Ada7z&yUK>ZF;i6YLGwI*Sg#Fk#Qr0Jg&VLax(nNN$u-XJ5=MsP3|(lEdIOJ7|(x3iY;ea)5#BW*mDV%^=8qOeYO&gIdJVuLLN3cFaN=xZtFB=b zH{l)PZl_j^u+qx@89}gAQW7ofb+k)QwX=aegihossZq*+@PlCpb$rpp>Cbk9UJO<~ zDjlXQ_Ig#W0zdD3&*ei(FwlN#3b%FSR%&M^ywF@Fr>d~do@-kIS$e%wkIVfJ|Ohh=zc zF&Rnic^|>@R%v?@jO}a9;nY3Qrg_!xC=ZWUcYiA5R+|2nsM*$+c$TOs6pm!}Z}dfM zGeBhMGWw3$6KZXav^>YNA=r6Es>p<6HRYcZY)z{>yasbC81A*G-le8~QoV;rtKnkx z;+os8BvEe?0A6W*a#dOudsv3aWs?d% z0oNngyVMjavLjtjiG`!007#?62ClTqqU$@kIY`=x^$2e>iqIy1>o|@Tw@)P)B8_1$r#6>DB_5 zmaOaoE~^9TolgDgooKFuEFB#klSF%9-~d2~_|kQ0Y{Ek=HH5yq9s zDq#1S551c`kSiWPZbweN^A4kWiP#Qg6er1}HcKv{fxb1*BULboD0fwfaNM_<55>qM zETZ8TJDO4V)=aPp_eQjX%||Ud<>wkIzvDlpNjqW>I}W!-j7M^TNe5JIFh#-}zAV!$ICOju8Kx)N z0vLtzDdy*rQN!7r>Xz7rLw8J-(GzQlYYVH$WK#F`i_i^qVlzTNAh>gBWKV@XC$T-` z3|kj#iCquDhiO7NKum07i|<-NuVsX}Q}mIP$jBJDMfUiaWR3c|F_kWBMw0_Sr|6h4 zk`_r5=0&rCR^*tOy$A8K;@|NqwncjZ>Y-75vlpxq%Cl3EgH`}^^~=u zoll6xxY@a>0f%Ddpi;=cY}fyG!K2N-dEyXXmUP5u){4VnyS^T4?pjN@Ot4zjL(Puw z_U#wMH2Z#8Pts{olG5Dy0tZj;N@;fHheu>YKYQU=4Bk|wcD9MbA`3O4bj$hNRHwzb zSLcG0SLV%zywdbuwl(^E_!@&)TdXge4O{MRWk2RKOt@!8E{$BU-AH(@4{gxs=YAz9LIob|Hzto0}9cWoz6Tp2x0&xi#$ zHh$dwO&UCR1Ob2w00-2eG7d4=cN(Y>0R#$q8?||q@iTi+7-w-xR%uMr&StFIthC<# zvK(aPduwuNB}oJUV8+Zl)%cnfsHI%4`;x6XW^UF^e4s3Z@S<&EV8?56Wya;HNs0E> z`$0dgRdiUz9RO9Au3RmYq>K#G=X%*_dUbSJHP`lSfBaN8t-~@F>)BL1RT*9I851A3 z<-+Gb#_QRX>~av#Ni<#zLswtu-c6{jGHR>wflhKLzC4P@b%8&~u)fosoNjk4r#GvC zlU#UU9&0Hv;d%g72Wq?Ym<&&vtA3AB##L}=ZjiTR4hh7J)e>ei} zt*u+>h%MwN`%3}b4wYpV=QwbY!jwfIj#{me)TDOG`?tI!%l=AwL2G@9I~}?_dA5g6 zCKgK(;6Q0&P&K21Tx~k=o6jwV{dI_G+Ba*Zts|Tl6q1zeC?iYJTb{hel*x>^wb|2RkHkU$!+S4OU4ZOKPZjV>9OVsqNnv5jK8TRAE$A&^yRwK zj-MJ3Pl?)KA~fq#*K~W0l4$0=8GRx^9+?w z!QT8*-)w|S^B0)ZeY5gZPI2G(QtQf?DjuK(s^$rMA!C%P22vynZY4SuOE=wX2f8$R z)A}mzJi4WJnZ`!bHG1=$lwaxm!GOnRbR15F$nRC-M*H<*VfF|pQw(;tbSfp({>9^5 zw_M1-SJ9eGF~m(0dvp*P8uaA0Yw+EkP-SWqu zqal$hK8SmM7#Mrs0@OD+%_J%H*bMyZiWAZdsIBj#lkZ!l2c&IpLu(5^T0Ge5PHzR} zn;TXs$+IQ_&;O~u=Jz+XE0wbOy`=6>m9JVG} zJ~Kp1e5m?K3x@@>!D)piw^eMIHjD4RebtR`|IlckplP1;r21wTi8v((KqNqn%2CB< zifaQc&T}*M&0i|LW^LgdjIaX|o~I$`owHolRqeH_CFrqCUCleN130&vH}dK|^kC>) z-r2P~mApHotL4dRX$25lIcRh_*kJaxi^%ZN5-GAAMOxfB!6flLPY-p&QzL9TE%ho( zRwftE3sy5<*^)qYzKkL|rE>n@hyr;xPqncY6QJ8125!MWr`UCWuC~A#G1AqF1@V$kv>@NBvN&2ygy*{QvxolkRRb%Ui zsmKROR%{*g*WjUUod@@cS^4eF^}yQ1>;WlGwOli z+Y$(8I`0(^d|w>{eaf!_BBM;NpCoeem2>J}82*!em=}}ymoXk>QEfJ>G(3LNA2-46 
z5PGvjr)Xh9>aSe>vEzM*>xp{tJyZox1ZRl}QjcvX2TEgNc^(_-hir@Es>NySoa1g^ zFow_twnHdx(j?Q_3q51t3XI7YlJ4_q&(0#)&a+RUy{IcBq?)eaWo*=H2UUVIqtp&lW9JTJiP&u zw8+4vo~_IJXZIJb_U^&=GI1nSD%e;P!c{kZALNCm5c%%oF+I3DrA63_@4)(v4(t~JiddILp7jmoy+>cD~ivwoctFfEL zP*#2Rx?_&bCpX26MBgp^4G>@h`Hxc(lnqyj!*t>9sOBcXN(hTwEDpn^X{x!!gPX?1 z*uM$}cYRwHXuf+gYTB}gDTcw{TXSOUU$S?8BeP&sc!Lc{{pEv}x#ELX>6*ipI1#>8 zKes$bHjiJ1OygZge_ak^Hz#k;=od1wZ=o71ba7oClBMq>Uk6hVq|ePPt)@FM5bW$I z;d2Or@wBjbTyZj|;+iHp%Bo!Vy(X3YM-}lasMItEV_QrP-Kk_J4C>)L&I3Xxj=E?| zsAF(IfVQ4w+dRRnJ>)}o^3_012YYgFWE)5TT=l2657*L8_u1KC>Y-R{7w^S&A^X^U}h20jpS zQsdeaA#WIE*<8KG*oXc~$izYilTc#z{5xhpXmdT-YUnGh9v4c#lrHG6X82F2-t35} zB`jo$HjKe~E*W$=g|j&P>70_cI`GnOQ;Jp*JK#CT zuEGCn{8A@bC)~0%wsEv?O^hSZF*iqjO~_h|>xv>PO+?525Nw2472(yqS>(#R)D7O( zg)Zrj9n9$}=~b00=Wjf?E418qP-@8%MQ%PBiCTX=$B)e5cHFDu$LnOeJ~NC;xmOk# z>z&TbsK>Qzk)!88lNI8fOE2$Uxso^j*1fz>6Ot49y@=po)j4hbTIcVR`ePHpuJSfp zxaD^Dn3X}Na3@<_Pc>a;-|^Pon(>|ytG_+U^8j_JxP=_d>L$Hj?|0lz>_qQ#a|$+( z(x=Lipuc8p4^}1EQhI|TubffZvB~lu$zz9ao%T?%ZLyV5S9}cLeT?c} z>yCN9<04NRi~1oR)CiBakoNhY9BPnv)kw%*iv8vdr&&VgLGIs(-FbJ?d_gfbL2={- zBk4lkdPk~7+jIxd4{M(-W1AC_WcN&Oza@jZoj zaE*9Y;g83#m(OhA!w~LNfUJNUuRz*H-=$s*z+q+;snKPRm9EptejugC-@7-a-}Tz0 z@KHra#Y@OXK+KsaSN9WiGf?&jlZ!V7L||%KHP;SLksMFfjkeIMf<1e~t?!G3{n)H8 zQAlFY#QwfKuj;l@<$YDATAk;%PtD%B(0<|8>rXU< zJ66rkAVW_~Dj!7JGdGGi4NFuE?7ZafdMxIh65Sz7yQoA7fBZCE@WwysB=+`kT^LFX zz8#FlSA5)6FG9(qL3~A24mpzL@@2D#>0J7mMS1T*9UJ zvOq!!a(%IYY69+h45CE?(&v9H4FCr>gK0>mK~F}5RdOuH2{4|}k@5XpsX7+LZo^Qa4sH5`eUj>iffoBVm+ zz4Mtf`h?NW$*q1yr|}E&eNl)J``SZvTf6Qr*&S%tVv_OBpbjnA0&Vz#(;QmGiq-k! zgS0br4I&+^2mgA15*~Cd00cXLYOLA#Ep}_)eED>m+K@JTPr_|lSN}(OzFXQSBc6fM z@f-%2;1@BzhZa*LFV z-LrLmkmB%<<&jEURBEW>soaZ*rSIJNwaV%-RSaCZi4X)qYy^PxZ=oL?6N-5OGOMD2 z;q_JK?zkwQ@b3~ln&sDtT5SpW9a0q+5Gm|fpVY2|zqlNYBR}E5+ahgdj!CvK$Tlk0 z9g$5N;aar=CqMsudQV>yb4l@hN(9Jcc=1(|OHsqH6|g=K-WBd8GxZ`AkT?OO z-z_Ued-??Z*R4~L7jwJ%-`s~FK|qNAJ;EmIVDVpk{Lr7T4l{}vL)|GuUuswe9c5F| zv*5%u01hlv08?00Vpwyk*Q&&fY8k6MjOfpZfKa@F-^6d=Zv|0@&4_544RP5(s|4VPVP-f>%u(J@23BHqo2=zJ#v9g=F!cP((h zpt0|(s++ej?|$;2PE%+kc6JMmJjDW)3BXvBK!h!E`8Y&*7hS{c_Z?4SFP&Y<3evqf z9-ke+bSj$%Pk{CJlJbWwlBg^mEC^@%Ou?o>*|O)rl&`KIbHrjcpqsc$Zqt0^^F-gU2O=BusO+(Op}!jNzLMc zT;0YT%$@ClS%V+6lMTfhuzzxomoat=1H?1$5Ei7&M|gxo`~{UiV5w64Np6xV zVK^nL$)#^tjhCpTQMspXI({TW^U5h&Wi1Jl8g?P1YCV4=%ZYyjSo#5$SX&`r&1PyC zzc;uzCd)VTIih|8eNqFNeBMe#j_FS6rq81b>5?aXg+E#&$m++Gz9<+2)h=K(xtn}F ziV{rmu+Y>A)qvF}ms}4X^Isy!M&1%$E!rTO~5(p+8{U6#hWu>(Ll1}eD64Xa>~73A*538wry?v$vW z>^O#FRdbj(k0Nr&)U`Tl(4PI*%IV~;ZcI2z&rmq=(k^}zGOYZF3b2~Klpzd2eZJl> zB=MOLwI1{$RxQ7Y4e30&yOx?BvAvDkTBvWPpl4V8B7o>4SJn*+h1Ms&fHso%XLN5j z-zEwT%dTefp~)J_C8;Q6i$t!dnlh-!%haR1X_NuYUuP-)`IGWjwzAvp!9@h`kPZhf zwLwFk{m3arCdx8rD~K2`42mIN4}m%OQ|f)4kf%pL?Af5Ul<3M2fv>;nlhEPR8b)u} zIV*2-wyyD%%) zl$G@KrC#cUwoL?YdQyf9WH)@gWB{jd5w4evI& zOFF)p_D8>;3-N1z6mES!OPe>B^<;9xsh)){Cw$Vs-ez5nXS95NOr3s$IU;>VZSzKn zBvub8_J~I%(DozZW@{)Vp37-zevxMRZ8$8iRfwHmYvyjOxIOAF2FUngKj289!(uxY zaClWm!%x&teKmr^ABrvZ(ikx{{I-lEzw5&4t3P0eX%M~>$wG0ZjA4Mb&op+0$#SO_ z--R`>X!aqFu^F|a!{Up-iF(K+alKB{MNMs>e(i@Tpy+7Z-dK%IEjQFO(G+2mOb@BO zP>WHlS#fSQm0et)bG8^ZDScGnh-qRKIFz zfUdnk=m){ej0i(VBd@RLtRq3Ep=>&2zZ2%&vvf?Iex01hx1X!8U+?>ER;yJlR-2q4 z;Y@hzhEC=d+Le%=esE>OQ!Q|E%6yG3V_2*uh&_nguPcZ{q?DNq8h_2ahaP6=pP-+x zK!(ve(yfoYC+n(_+chiJ6N(ZaN+XSZ{|H{TR1J_s8x4jpis-Z-rlRvRK#U%SMJ(`C z?T2 zF(NNfO_&W%2roEC2j#v*(nRgl1X)V-USp-H|CwFNs?n@&vpRcj@W@xCJwR6@T!jt377?XjZ06=`d*MFyTdyvW!`mQm~t3luzYzvh^F zM|V}rO>IlBjZc}9Z zd$&!tthvr>5)m;5;96LWiAV0?t)7suqdh0cZis`^Pyg@?t>Ms~7{nCU;z`Xl+raSr zXpp=W1oHB*98s!Tpw=R5C)O{{Inl>9l7M*kq%#w9a$6N~v?BY2GKOVRkXYCgg*d

<5G2M1WZP5 zzqSuO91lJod(SBDDw<*sX(+F6Uq~YAeYV#2A;XQu_p=N5X+#cmu19Qk>QAnV=k!?wbk5I;tDWgFc}0NkvC*G=V+Yh1cyeJVq~9czZiDXe+S=VfL2g`LWo8om z$Y~FQc6MFjV-t1Y`^D9XMwY*U_re2R?&(O~68T&D4S{X`6JYU-pz=}ew-)V0AOUT1 zVOkHAB-8uBcRjLvz<9HS#a@X*Kc@|W)nyiSgi|u5$Md|P()%2(?olGg@ypoJwp6>m z*dnfjjWC>?_1p;%1brqZyDRR;8EntVA92EJ3ByOxj6a+bhPl z;a?m4rQAV1@QU^#M1HX)0+}A<7TCO`ZR_RzF}X9-M>cRLyN4C+lCk2)kT^3gN^`IT zNP~fAm(wyIoR+l^lQDA(e1Yv}&$I!n?&*p6?lZcQ+vGLLd~fM)qt}wsbf3r=tmVYe zl)ntf#E!P7wlakP9MXS7m0nsAmqxZ*)#j;M&0De`oNmFgi$ov#!`6^4)iQyxg5Iuj zjLAhzQ)r`^hf7`*1`Rh`X;LVBtDSz@0T?kkT1o!ijeyTGt5vc^Cd*tmNgiNo^EaWvaC8$e+nb_{W01j3%=1Y&92YacjCi>eNbwk%-gPQ@H-+4xskQ}f_c=jg^S-# zYFBDf)2?@5cy@^@FHK5$YdAK9cI;!?Jgd}25lOW%xbCJ>By3=HiK@1EM+I46A)Lsd zeT|ZH;KlCml=@;5+hfYf>QNOr^XNH%J-lvev)$Omy8MZ`!{`j>(J5cG&ZXXgv)TaF zg;cz99i$4CX_@3MIb?GL0s*8J=3`#P(jXF(_(6DXZjc@(@h&=M&JG)9&Te1?(^XMW zjjC_70|b=9hB6pKQi`S^Ls7JyJw^@P>Ko^&q8F&?>6i;#CbxUiLz1ZH4lNyd@QACd zu>{!sqjB!2Dg}pbAXD>d!3jW}=5aN0b;rw*W>*PAxm7D)aw(c*RX2@bTGEI|RRp}vw7;NR2wa;rXN{L{Q#=Fa z$x@ms6pqb>!8AuV(prv>|aU8oWV={C&$c zMa=p=CDNOC2tISZcd8~18GN5oTbKY+Vrq;3_obJlfSKRMk;Hdp1`y`&LNSOqeauR_ z^j*Ojl3Ohzb5-a49A8s|UnM*NM8tg}BJXdci5%h&;$afbmRpN0&~9rCnBA`#lG!p zc{(9Y?A0Y9yo?wSYn>iigf~KP$0*@bGZ>*YM4&D;@{<%Gg5^uUJGRrV4 z(aZOGB&{_0f*O=Oi0k{@8vN^BU>s3jJRS&CJOl3o|BE{FAA&a#2YYiX3pZz@|Go-F z|Fly;7eX2OTs>R}<`4RwpHFs9nwh)B28*o5qK1Ge=_^w0m`uJOv!=&!tzt#Save(C zgKU=Bsgql|`ui(e1KVxR`?>Dx>(rD1$iWp&m`v)3A!j5(6vBm*z|aKm*T*)mo(W;R zNGo2`KM!^SS7+*9YxTm6YMm_oSrLceqN*nDOAtagULuZl5Q<7mOnB@Hq&P|#9y{5B z!2x+2s<%Cv2Aa0+u{bjZXS);#IFPk(Ph-K7K?3i|4ro> zRbqJoiOEYo(Im^((r}U4b8nvo_>4<`)ut`24?ILnglT;Pd&U}$lV3U$F9#PD(O=yV zgNNA=GW|(E=&m_1;uaNmipQe?pon4{T=zK!N!2_CJL0E*R^XXIKf*wi!>@l}3_P9Z zF~JyMbW!+n-+>!u=A1ESxzkJy$DRuG+$oioG7(@Et|xVbJ#BCt;J43Nvj@MKvTxzy zMmjNuc#LXBxFAwIGZJk~^!q$*`FME}yKE8d1f5Mp}KHNq(@=Z8YxV}0@;YS~|SpGg$_jG7>_8WWYcVx#4SxpzlV9N4aO>K{c z$P?a_fyDzGX$Of3@ykvedGd<@-R;M^Shlj*SswJLD+j@hi_&_>6WZ}#AYLR0iWMK|A zH_NBeu(tMyG=6VO-=Pb>-Q#$F*or}KmEGg*-n?vWQREURdB#+6AvOj*I%!R-4E_2$ zU5n9m>RWs|Wr;h2DaO&mFBdDb-Z{APGQx$(L`if?C|njd*fC=rTS%{o69U|meRvu?N;Z|Y zbT|ojL>j;q*?xXmnHH#3R4O-59NV1j=uapkK7}6@Wo*^Nd#(;$iuGsb;H315xh3pl zHaJ>h-_$hdNl{+|Zb%DZH%ES;*P*v0#}g|vrKm9;j-9e1M4qX@zkl&5OiwnCz=tb6 zz<6HXD+rGIVpGtkb{Q^LIgExOm zz?I|oO9)!BOLW#krLmWvX5(k!h{i>ots*EhpvAE;06K|u_c~y{#b|UxQ*O@Ks=bca z^_F0a@61j3I(Ziv{xLb8AXQj3;R{f_l6a#H5ukg5rxwF9A$?Qp-Mo54`N-SKc}fWp z0T)-L@V$$&my;l#Ha{O@!fK4-FSA)L&3<${Hcwa7ue`=f&YsXY(NgeDU#sRlT3+9J z6;(^(sjSK@3?oMo$%L-nqy*E;3pb0nZLx6 z;h5)T$y8GXK1DS-F@bGun8|J(v-9o=42&nLJy#}M5D0T^5VWBNn$RpC zZzG6Bt66VY4_?W=PX$DMpKAI!d`INr) zkMB{XPQ<52rvWVQqgI0OL_NWxoe`xxw&X8yVftdODPj5|t}S6*VMqN$-h9)1MBe0N zYq?g0+e8fJCoAksr0af1)FYtz?Me!Cxn`gUx&|T;)695GG6HF7!Kg1zzRf_{VWv^bo81v4$?F6u2g|wxHc6eJQAg&V z#%0DnWm2Rmu71rPJ8#xFUNFC*V{+N_qqFH@gYRLZ6C?GAcVRi>^n3zQxORPG)$-B~ z%_oB?-%Zf7d*Fe;cf%tQwcGv2S?rD$Z&>QC2X^vwYjnr5pa5u#38cHCt4G3|efuci z@3z=#A13`+ztmp;%zjXwPY_aq-;isu*hecWWX_=Z8paSqq7;XYnUjK*T>c4~PR4W7 z#C*%_H&tfGx`Y$w7`dXvVhmovDnT>btmy~SLf>>~84jkoQ%cv=MMb+a{JV&t0+1`I z32g_Y@yDhKe|K^PevP~MiiVl{Ou7^Mt9{lOnXEQ`xY^6L8D$705GON{!1?1&YJEl#fTf5Z)da=yiEQ zGgtC-soFGOEBEB~ZF_{7b(76En>d}mI~XIwNw{e>=Fv)sgcw@qOsykWr?+qAOZSVrQfg}TNI ztKNG)1SRrAt6#Q?(me%)>&A_^DM`pL>J{2xu>xa$3d@90xR61TQDl@fu%_85DuUUA za9tn64?At;{`BAW6oykwntxHeDpXsV#{tmt5RqdN7LtcF4vR~_kZNT|wqyR#z^Xcd zFdymVRZvyLfTpBT>w9<)Ozv@;Yk@dOSVWbbtm^y@@C>?flP^EgQPAwsy75bveo=}T zFxl(f)s)j(0#N_>Or(xEuV(n$M+`#;Pc$1@OjXEJZumkaekVqgP_i}p`oTx;terTx zZpT+0dpUya2hqlf`SpXN{}>PfhajNk_J0`H|2<5E;U5Vh4F8er z;RxLSFgpGhkU>W?IwdW~NZTyOBrQ84H7_?gviIf71l`EETodG9a1!8e{jW?DpwjL? 
zGEM&eCzwoZt^P*8KHZ$B<%{I}>46IT%jJ3AnnB5P%D2E2Z_ z1M!vr#8r}1|KTqWA4%67ZdbMW2YJ81b(KF&SQ2L1Qn(y-=J${p?xLMx3W7*MK;LFQ z6Z`aU;;mTL4XrrE;HY*Rkh6N%?qviUGNAKiCB~!P}Z->IpO6E(gGd7I#eDuT7j|?nZ zK}I(EJ>$Kb&@338M~O+em9(L!+=0zBR;JAQesx|3?Ok90)D1aS9P?yTh6Poh8Cr4X zk3zc=f2rE7jj+aP7nUsr@~?^EGP>Q>h#NHS?F{Cn`g-gD<8F&dqOh-0sa%pfL`b+1 zUsF*4a~)KGb4te&K0}bE>z3yb8% zibb5Q%Sfiv7feb1r0tfmiMv z@^4XYwg@KZI=;`wC)`1jUA9Kv{HKe2t$WmRcR4y8)VAFjRi zaz&O7Y2tDmc5+SX(bj6yGHYk$dBkWc96u3u&F)2yEE~*i0F%t9Kg^L6MJSb&?wrXi zGSc;_rln$!^ybwYBeacEFRsVGq-&4uC{F)*Y;<0y7~USXswMo>j4?~5%Zm!m@i@-> zXzi82sa-vpU{6MFRktJy+E0j#w`f`>Lbog{zP|9~hg(r{RCa!uGe>Yl536cn$;ouH za#@8XMvS-kddc1`!1LVq;h57~zV`7IYR}pp3u!JtE6Q67 zq3H9ZUcWPm2V4IukS}MCHSdF0qg2@~ufNx9+VMjQP&exiG_u9TZAeAEj*jw($G)zL zq9%#v{wVyOAC4A~AF=dPX|M}MZV)s(qI9@aIK?Pe+~ch|>QYb+78lDF*Nxz2-vpRbtQ*F4$0fDbvNM#CCatgQ@z1+EZWrt z2dZfywXkiW=no5jus-92>gXn5rFQ-COvKyegmL=4+NPzw6o@a?wGE-1Bt;pCHe;34K%Z z-FnOb%!nH;)gX+!a3nCk?5(f1HaWZBMmmC@lc({dUah+E;NOros{?ui1zPC-Q0);w zEbJmdE$oU$AVGQPdm{?xxI_0CKNG$LbY*i?YRQ$(&;NiA#h@DCxC(U@AJ$Yt}}^xt-EC_ z4!;QlLkjvSOhdx!bR~W|Ezmuf6A#@T`2tsjkr>TvW*lFCMY>Na_v8+{Y|=MCu1P8y z89vPiH5+CKcG-5lzk0oY>~aJC_0+4rS@c@ZVKLAp`G-sJB$$)^4*A!B zmcf}lIw|VxV9NSoJ8Ag3CwN&d7`|@>&B|l9G8tXT^BDHOUPrtC70NgwN4${$k~d_4 zJ@eo6%YQnOgq$th?0{h`KnqYa$Nz@vlHw<%!C5du6<*j1nwquk=uY}B8r7f|lY+v7 zm|JU$US08ugor8E$h3wH$c&i~;guC|3-tqJy#T;v(g( zBZtPMSyv%jzf->435yM(-UfyHq_D=6;ouL4!ZoD+xI5uCM5ay2m)RPmm$I}h>()hS zO!0gzMxc`BPkUZ)WXaXam%1;)gedA7SM8~8yIy@6TPg!hR0=T>4$Zxd)j&P-pXeSF z9W`lg6@~YDhd19B9ETv(%er^Xp8Yj@AuFVR_8t*KS;6VHkEDKI#!@l!l3v6`W1`1~ zP{C@keuV4Q`Rjc08lx?zmT$e$!3esc9&$XZf4nRL(Z*@keUbk!GZi(2Bmyq*saOD? z3Q$V<*P-X1p2}aQmuMw9nSMbOzuASsxten7DKd6A@ftZ=NhJ(0IM|Jr<91uAul4JR zADqY^AOVT3a(NIxg|U;fyc#ZnSzw2cr}#a5lZ38>nP{05D)7~ad7JPhw!LqOwATXtRhK!w0X4HgS1i<%AxbFmGJx9?sEURV+S{k~g zGYF$IWSlQonq6}e;B(X(sIH|;52+(LYW}v_gBcp|x%rEAVB`5LXg_d5{Q5tMDu0_2 z|LOm$@K2?lrLNF=mr%YP|U-t)~9bqd+wHb4KuPmNK<}PK6e@aosGZK57=Zt+kcszVOSbe;`E^dN! 
ze7`ha3WUUU7(nS0{?@!}{0+-VO4A{7+nL~UOPW9_P(6^GL0h${SLtqG!} zKl~Ng5#@Sy?65wk9z*3SA`Dpd4b4T^@C8Fhd8O)k_4%0RZL5?#b~jmgU+0|DB%0Z) zql-cPC>A9HPjdOTpPC` zQwvF}uB5kG$Xr4XnaH#ruSjM*xG?_hT7y3G+8Ox`flzU^QIgb_>2&-f+XB6MDr-na zSi#S+c!ToK84<&m6sCiGTd^8pNdXo+$3^l3FL_E`0 z>8it5YIDxtTp2Tm(?}FX^w{fbfgh7>^8mtvN>9fWgFN_*a1P`Gz*dyOZF{OV7BC#j zQV=FQM5m>47xXgapI$WbPM5V`V<7J9tD)oz@d~MDoM`R^Y6-Na(lO~uvZlpu?;zw6 zVO1faor3dg#JEb5Q*gz4<W8tgC3nE2BG2jeIQs1)<{In&7hJ39x=;ih;CJDy)>0S1at*7n?Wr0ahYCpFjZ|@u91Zl7( zv;CSBRC65-6f+*JPf4p1UZ)k=XivKTX6_bWT~7V#rq0Xjas6hMO!HJN8GdpBKg_$B zwDHJF6;z?h<;GXFZan8W{XFNPpOj!(&I1`&kWO86p?Xz`a$`7qV7Xqev|7nn_lQuX ziGpU1MMYt&5dE2A62iX3;*0WzNB9*nSTzI%62A+N?f?;S>N@8M=|ef3gtQTIA*=yq zQAAjOqa!CkHOQo4?TsqrrsJLclXcP?dlAVv?v`}YUjo1Htt;6djP@NPFH+&p1I+f_ z)Y279{7OWomY8baT(4TAOlz1OyD{4P?(DGv3XyJTA2IXe=kqD)^h(@*E3{I~w;ws8 z)ZWv7E)pbEM zd3MOXRH3mQhks9 zv6{s;k0y5vrcjXaVfw8^>YyPo=oIqd5IGI{)+TZq5Z5O&hXAw%ZlL}^6FugH;-%vP zAaKFtt3i^ag226=f0YjzdPn6|4(C2sC5wHFX{7QF!tG1E-JFA`>eZ`}$ymcRJK?0c zN363o{&ir)QySOFY0vcu6)kX#;l??|7o{HBDVJN+17rt|w3;(C_1b>d;g9Gp=8YVl zYTtA52@!7AUEkTm@P&h#eg+F*lR zQ7iotZTcMR1frJ0*V@Hw__~CL>_~2H2cCtuzYIUD24=Cv!1j6s{QS!v=PzwQ(a0HS zBKx04KA}-Ue+%9d`?PG*hIij@54RDSQpA7|>qYVIrK_G6%6;#ZkR}NjUgmGju)2F`>|WJoljo)DJgZr4eo1k1i1+o z1D{>^RlpIY8OUaOEf5EBu%a&~c5aWnqM zxBpJq98f=%M^{4mm~5`CWl%)nFR64U{(chmST&2jp+-r z3675V<;Qi-kJud%oWnCLdaU-)xTnMM%rx%Jw6v@=J|Ir=4n-1Z23r-EVf91CGMGNz zb~wyv4V{H-hkr3j3WbGnComiqmS0vn?n?5v2`Vi>{Ip3OZUEPN7N8XeUtF)Ry6>y> zvn0BTLCiqGroFu|m2zG-;Xb6;W`UyLw)@v}H&(M}XCEVXZQoWF=Ykr5lX3XWwyNyF z#jHv)A*L~2BZ4lX?AlN3X#axMwOC)PoVy^6lCGse9bkGjb=qz%kDa6}MOmSwK`cVO zt(e*MW-x}XtU?GY5}9{MKhRhYOlLhJE5=ca+-RmO04^ z66z{40J=s=ey9OCdc(RCzy zd7Zr1%!y3}MG(D=wM_ebhXnJ@MLi7cImDkhm0y{d-Vm81j`0mbi4lF=eirlr)oW~a zCd?26&j^m4AeXEsIUXiTal)+SPM4)HX%%YWF1?(FV47BaA`h9m67S9x>hWMVHx~Hg z1meUYoLL(p@b3?x|9DgWeI|AJ`Ia84*P{Mb%H$ZRROouR4wZhOPX15=KiBMHl!^JnCt$Az`KiH^_d>cev&f zaG2>cWf$=A@&GP~DubsgYb|L~o)cn5h%2`i^!2)bzOTw2UR!>q5^r&2Vy}JaWFUQE04v>2;Z@ZPwXr?y&G(B^@&y zsd6kC=hHdKV>!NDLIj+3rgZJ|dF`%N$DNd;B)9BbiT9Ju^Wt%%u}SvfM^=|q-nxDG zuWCQG9e#~Q5cyf8@y76#kkR^}{c<_KnZ0QsZcAT|YLRo~&tU|N@BjxOuy`#>`X~Q< z?R?-Gsk$$!oo(BveQLlUrcL#eirhgBLh`qHEMg`+sR1`A=1QX7)ZLMRT+GBy?&mM8 zQG^z-!Oa&J-k7I(3_2#Q6Bg=NX<|@X&+YMIOzfEO2$6Mnh}YV!m!e^__{W@-CTprr zbdh3f=BeCD$gHwCrmwgM3LAv3!Mh$wM)~KWzp^w)Cu6roO7uUG5z*}i0_0j47}pK; ztN530`ScGatLOL06~zO)Qmuv`h!gq5l#wx(EliKe&rz-5qH(hb1*fB#B+q`9=jLp@ zOa2)>JTl7ovxMbrif`Xe9;+fqB1K#l=Dv!iT;xF zdkCvS>C5q|O;}ns3AgoE({Ua-zNT-9_5|P0iANmC6O76Sq_(AN?UeEQJ>#b54fi3k zFmh+P%b1x3^)0M;QxXLP!BZ^h|AhOde*{9A=f3|Xq*JAs^Y{eViF|=EBfS6L%k4ip zk+7M$gEKI3?bQg?H3zaE@;cyv9kv;cqK$VxQbFEsy^iM{XXW0@2|DOu$!-k zSFl}Y=jt-VaT>Cx*KQnHTyXt}f9XswFB9ibYh+k2J!ofO+nD?1iw@mwtrqI4_i?nE zhLkPp41ED62me}J<`3RN80#vjW;wt`pP?%oQ!oqy7`miL>d-35a=qotK$p{IzeSk# ze_$CFYp_zIkrPFVaW^s#U4xT1lI^A0IBe~Y<4uS%zSV=wcuLr%gQT=&5$&K*bwqx| zWzCMiz>7t^Et@9CRUm9E+@hy~sBpm9fri$sE1zgLU((1?Yg{N1Sars=DiW&~Zw=3I zi7y)&oTC?UWD2w97xQ&5vx zRXEBGeJ(I?Y}eR0_O{$~)bMJRTsNUPIfR!xU9PE7A>AMNr_wbrFK>&vVw=Y;RH zO$mlpmMsQ}-FQ2cSj7s7GpC+~^Q~dC?y>M}%!-3kq(F3hGWo9B-Gn02AwUgJ>Z-pKOaj zysJBQx{1>Va=*e@sLb2z&RmQ7ira;aBijM-xQ&cpR>X3wP^foXM~u1>sv9xOjzZpX z0K;EGouSYD~oQ&lAafj3~EaXfFShC+>VsRlEMa9cg9i zFxhCKO}K0ax6g4@DEA?dg{mo>s+~RPI^ybb^u--^nTF>**0l5R9pocwB?_K)BG_)S zyLb&k%XZhBVr7U$wlhMqwL)_r&&n%*N$}~qijbkfM|dIWP{MyLx}X&}ES?}7i;9bW zmTVK@zR)7kE2+L42Q`n4m0VVg5l5(W`SC9HsfrLZ=v%lpef=Gj)W59VTLe+Z$8T8i z4V%5+T0t8LnM&H>Rsm5C%qpWBFqgTwL{=_4mE{S3EnBXknM&u8n}A^IIM4$s3m(Rd z>zq=CP-!9p9es2C*)_hoL@tDYABn+o#*l;6@7;knWIyDrt5EuakO99S$}n((Fj4y} zD!VvuRzghcE{!s;jC*<_H$y6!6QpePo2A3ZbX*ZzRnQq*b%KK^NF^z96CHaWmzU@f 
z#j;y?X=UP&+YS3kZx7;{ zDA{9(wfz7GF`1A6iB6fnXu0?&d|^p|6)%3$aG0Uor~8o? z*e}u#qz7Ri?8Uxp4m_u{a@%bztvz-BzewR6bh*1Xp+G=tQGpcy|4V_&*aOqu|32CM zz3r*E8o8SNea2hYJpLQ-_}R&M9^%@AMx&`1H8aDx4j%-gE+baf2+9zI*+Pmt+v{39 zDZ3Ix_vPYSc;Y;yn68kW4CG>PE5RoaV0n@#eVmk?p$u&Fy&KDTy!f^Hy6&^-H*)#u zdrSCTJPJw?(hLf56%2;_3n|ujUSJOU8VPOTlDULwt0jS@j^t1WS z!n7dZIoT+|O9hFUUMbID4Ec$!cc($DuQWkocVRcYSikFeM&RZ=?BW)mG4?fh#)KVG zcJ!<=-8{&MdE)+}?C8s{k@l49I|Zwswy^ZN3;E!FKyglY~Aq?4m74P-0)sMTGXqd5(S<-(DjjM z&7dL-Mr8jhUCAG$5^mI<|%`;JI5FVUnNj!VO2?Jiqa|c2;4^n!R z`5KK0hyB*F4w%cJ@Un6GC{mY&r%g`OX|1w2$B7wxu97%<@~9>NlXYd9RMF2UM>(z0 zouu4*+u+1*k;+nFPk%ly!nuMBgH4sL5Z`@Rok&?Ef=JrTmvBAS1h?C0)ty5+yEFRz zY$G=coQtNmT@1O5uk#_MQM1&bPPnspy5#>=_7%WcEL*n$;sSAZcXxMpcXxLe;_mLA z5F_paad+bGZV*oh@8h0(|D2P!q# zTHjmiphJ=AazSeKQPkGOR-D8``LjzToyx{lfK-1CDD6M7?pMZOdLKFtjZaZMPk4}k zW)97Fh(Z+_Fqv(Q_CMH-YYi?fR5fBnz7KOt0*t^cxmDoIokc=+`o# zrud|^h_?KW=Gv%byo~(Ln@({?3gnd?DUf-j2J}|$Mk>mOB+1{ZQ8HgY#SA8END(Zw z3T+W)a&;OO54~m}ffemh^oZ!Vv;!O&yhL0~hs(p^(Yv=(3c+PzPXlS5W79Er8B1o* z`c`NyS{Zj_mKChj+q=w)B}K za*zzPhs?c^`EQ;keH{-OXdXJet1EsQ)7;{3eF!-t^4_Srg4(Ot7M*E~91gwnfhqaM zNR7dFaWm7MlDYWS*m}CH${o?+YgHiPC|4?X?`vV+ws&Hf1ZO-w@OGG^o4|`b{bLZj z&9l=aA-Y(L11!EvRjc3Zpxk7lc@yH1e$a}8$_-r$)5++`_eUr1+dTb@ zU~2P1HM#W8qiNN3b*=f+FfG1!rFxnNlGx{15}BTIHgxO>Cq4 z;#9H9YjH%>Z2frJDJ8=xq>Z@H%GxXosS@Z>cY9ppF+)e~t_hWXYlrO6)0p7NBMa`+ z^L>-#GTh;k_XnE)Cgy|0Dw;(c0* zSzW14ZXozu)|I@5mRFF1eO%JM=f~R1dkNpZM+Jh(?&Zje3NgM{2ezg1N`AQg5%+3Y z64PZ0rPq6;_)Pj-hyIOgH_Gh`1$j1!jhml7ksHA1`CH3FDKiHLz+~=^u@kUM{ilI5 z^FPiJ7mSrzBs9{HXi2{sFhl5AyqwUnU{sPcUD{3+l-ZHAQ)C;c$=g1bdoxeG(5N01 zZy=t8i{*w9m?Y>V;uE&Uy~iY{pY4AV3_N;RL_jT_QtLFx^KjcUy~q9KcLE3$QJ{!)@$@En{UGG7&}lc*5Kuc^780;7Bj;)X?1CSy*^^ zPP^M)Pr5R>mvp3_hmCtS?5;W^e@5BjE>Cs<`lHDxj<|gtOK4De?Sf0YuK5GX9G93i zMYB{8X|hw|T6HqCf7Cv&r8A$S@AcgG1cF&iJ5=%+x;3yB`!lQ}2Hr(DE8=LuNb~Vs z=FO&2pdc16nD$1QL7j+!U^XWTI?2qQKt3H8=beVTdHHa9=MiJ&tM1RRQ-=+vy!~iz zj3O{pyRhCQ+b(>jC*H)J)%Wq}p>;?@W*Eut@P&?VU+Sdw^4kE8lvX|6czf{l*~L;J zFm*V~UC;3oQY(ytD|D*%*uVrBB}BbAfjK&%S;z;7$w68(8PV_whC~yvkZmX)xD^s6 z{$1Q}q;99W?*YkD2*;)tRCS{q2s@JzlO~<8x9}X<0?hCD5vpydvOw#Z$2;$@cZkYrp83J0PsS~!CFtY%BP=yxG?<@#{7%2sy zOc&^FJxsUYN36kSY)d7W=*1-{7ghPAQAXwT7z+NlESlkUH&8ODlpc8iC*iQ^MAe(B z?*xO4i{zFz^G=^G#9MsLKIN64rRJykiuIVX5~0#vAyDWc9-=6BDNT_aggS2G{B>dD ze-B%d3b6iCfc5{@yz$>=@1kdK^tX9qh0=ocv@9$ai``a_ofxT=>X7_Y0`X}a^M?d# z%EG)4@`^Ej_=%0_J-{ga!gFtji_byY&Vk@T1c|ucNAr(JNr@)nCWj?QnCyvXg&?FW;S-VOmNL6^km_dqiVjJuIASVGSFEos@EVF7St$WE&Z%)`Q##+0 zjaZ=JI1G@0!?l|^+-ZrNd$WrHBi)DA0-Eke>dp=_XpV<%CO_Wf5kQx}5e<90dt>8k zAi00d0rQ821nA>B4JHN7U8Zz=0;9&U6LOTKOaC1FC8GgO&kc=_wHIOGycL@c*$`ce703t%>S}mvxEnD-V!;6c`2(p74V7D0No1Xxt`urE66$0(ThaAZ1YVG#QP$ zy~NN%kB*zhZ2Y!kjn826pw4bh)75*e!dse+2Db(;bN34Uq7bLpr47XTX{8UEeC?2i z*{$`3dP}32${8pF$!$2Vq^gY|#w+VA_|o(oWmQX8^iw#n_crb(K3{69*iU?<%C-%H zuKi)3M1BhJ@3VW>JA`M>L~5*_bxH@Euy@niFrI$82C1}fwR$p2E&ZYnu?jlS}u7W9AyfdXh2pM>78bIt3 z)JBh&XE@zA!kyCDfvZ1qN^np20c1u#%P6;6tU&dx0phT1l=(mw7`u!-0e=PxEjDds z9E}{E!7f9>jaCQhw)&2TtG-qiD)lD(4jQ!q{`x|8l&nmtHkdul# zy+CIF8lKbp9_w{;oR+jSLtTfE+B@tOd6h=QePP>rh4@~!8c;Hlg9m%%&?e`*Z?qz5-zLEWfi>`ord5uHF-s{^bexKAoMEV@9nU z^5nA{f{dW&g$)BAGfkq@r5D)jr%!Ven~Q58c!Kr;*Li#`4Bu_?BU0`Y`nVQGhNZk@ z!>Yr$+nB=`z#o2nR0)V3M7-eVLuY`z@6CT#OTUXKnxZn$fNLPv7w1y7eGE=Qv@Hey`n;`U=xEl|q@CCV^#l)s0ZfT+mUf z^(j5r4)L5i2jnHW4+!6Si3q_LdOLQi<^fu?6WdohIkn79=jf%Fs3JkeXwF(?_tcF? 
z?z#j6iXEd(wJy4|p6v?xNk-)iIf2oX5^^Y3q3ziw16p9C6B;{COXul%)`>nuUoM*q zzmr|NJ5n)+sF$!yH5zwp=iM1#ZR`O%L83tyog-qh1I z0%dcj{NUs?{myT~33H^(%0QOM>-$hGFeP;U$puxoJ>>o-%Lk*8X^rx1>j|LtH$*)>1C!Pv&gd16%`qw5LdOIUbkNhaBBTo}5iuE%K&ZV^ zAr_)kkeNKNYJRgjsR%vexa~&8qMrQYY}+RbZ)egRg9_$vkoyV|Nc&MH@8L)`&rpqd zXnVaI@~A;Z^c3+{x=xgdhnocA&OP6^rr@rTvCnhG6^tMox$ulw2U7NgUtW%|-5VeH z_qyd47}1?IbuKtqNbNx$HR`*+9o=8`%vM8&SIKbkX9&%TS++x z5|&6P<%=F$C?owUI`%uvUq^yW0>`>yz!|WjzsoB9dT;2Dx8iSuK%%_XPgy0dTD4kd zDXF@&O_vBVVKQq(9YTClUPM30Sk7B!v7nOyV`XC!BA;BIVwphh+c)?5VJ^(C;GoQ$ zvBxr7_p*k$T%I1ke}`U&)$uf}I_T~#3XTi53OX)PoXVgxEcLJgZG^i47U&>LY(l%_ z;9vVDEtuMCyu2fqZeez|RbbIE7@)UtJvgAcVwVZNLccswxm+*L&w`&t=ttT=sv6Aq z!HouSc-24Y9;0q$>jX<1DnnGmAsP))- z^F~o99gHZw`S&Aw7e4id6Lg7kMk-e)B~=tZ!kE7sGTOJ)8@q}np@j7&7Sy{2`D^FH zI7aX%06vKsfJ168QnCM2=l|i>{I{%@gcr>ExM0Dw{PX6ozEuqFYEt z087%MKC;wVsMV}kIiuu9Zz9~H!21d!;Cu#b;hMDIP7nw3xSX~#?5#SSjyyg+Y@xh| z%(~fv3`0j#5CA2D8!M2TrG=8{%>YFr(j)I0DYlcz(2~92?G*?DeuoadkcjmZszH5& zKI@Lis%;RPJ8mNsbrxH@?J8Y2LaVjUIhRUiO-oqjy<&{2X~*f|)YxnUc6OU&5iac= z*^0qwD~L%FKiPmlzi&~a*9sk2$u<7Al=_`Ox^o2*kEv?p`#G(p(&i|ot8}T;8KLk- zPVf_4A9R`5^e`Om2LV*cK59EshYXse&IoByj}4WZaBomoHAPKqxRKbPcD`lMBI)g- zeMRY{gFaUuecSD6q!+b5(?vAnf>c`Z(8@RJy%Ulf?W~xB1dFAjw?CjSn$ph>st5bc zUac1aD_m6{l|$#g_v6;=32(mwpveQDWhmjR7{|B=$oBhz`7_g7qNp)n20|^^op3 zSfTdWV#Q>cb{CMKlWk91^;mHap{mk)o?udk$^Q^^u@&jd zfZ;)saW6{e*yoL6#0}oVPb2!}r{pAUYtn4{P~ES9tTfC5hXZnM{HrC8^=Pof{G4%Bh#8 ze~?C9m*|fd8MK;{L^!+wMy>=f^8b&y?yr6KnTq28$pFMBW9Oy7!oV5z|VM$s-cZ{I|Xf@}-)1=$V&x7e;9v81eiTi4O5-vs?^5pCKy2l>q);!MA zS!}M48l$scB~+Umz}7NbwyTn=rqt@`YtuwiQSMvCMFk2$83k50Q>OK5&fe*xCddIm)3D0I6vBU<+!3=6?(OhkO|b4fE_-j zimOzyfBB_*7*p8AmZi~X2bgVhyPy>KyGLAnOpou~sx9)S9%r)5dE%ADs4v%fFybDa_w*0?+>PsEHTbhKK^G=pFz z@IxLTCROWiKy*)cV3y%0FwrDvf53Ob_XuA1#tHbyn%Ko!1D#sdhBo`;VC*e1YlhrC z?*y3rp86m#qI|qeo8)_xH*G4q@70aXN|SP+6MQ!fJQqo1kwO_v7zqvUfU=Gwx`CR@ zRFb*O8+54%_8tS(ADh}-hUJzE`s*8wLI>1c4b@$al)l}^%GuIXjzBK!EWFO8W`>F^ ze7y#qPS0NI7*aU)g$_ziF(1ft;2<}6Hfz10cR8P}67FD=+}MfhrpOkF3hFhQu;Q1y zu%=jJHTr;0;oC94Hi@LAF5quAQ(rJG(uo%BiRQ@8U;nhX)j0i?0SL2g-A*YeAqF>RVCBOTrn{0R27vu}_S zS>tX4!#&U4W;ikTE!eFH+PKw%p+B(MR2I%n#+m0{#?qRP_tR@zpgCb=4rcrL!F=;A zh%EIF8m6%JG+qb&mEfuFTLHSxUAZEvC-+kvZKyX~SA3Umt`k}}c!5dy?-sLIM{h@> z!2=C)@nx>`;c9DdwZ&zeUc(7t<21D7qBj!|1^Mp1eZ6)PuvHx+poKSDCSBMFF{bKy z;9*&EyKitD99N}%mK8431rvbT+^%|O|HV23{;RhmS{$5tf!bIPoH9RKps`-EtoW5h zo6H_!s)Dl}2gCeGF6>aZtah9iLuGd19^z0*OryPNt{70RvJSM<#Ox9?HxGg04}b^f zrVEPceD%)#0)v5$YDE?f`73bQ6TA6wV;b^x*u2Ofe|S}+q{s5gr&m~4qGd!wOu|cZ||#h_u=k*fB;R6&k?FoM+c&J;ISg70h!J7*xGus)ta4veTdW)S^@sU@ z4$OBS=a~@F*V0ECic;ht4@?Jw<9kpjBgHfr2FDPykCCz|v2)`JxTH55?b3IM={@DU z!^|9nVO-R#s{`VHypWyH0%cs;0GO3E;It6W@0gX6wZ%W|Dzz&O%m17pa19db(er}C zUId1a4#I+Ou8E1MU$g=zo%g7K(=0Pn$)Rk z<4T2u<0rD)*j+tcy2XvY+0 z0d2pqm4)4lDewsAGThQi{2Kc3&C=|OQF!vOd#WB_`4gG3@inh-4>BoL!&#ij8bw7? 
zqjFRDaQz!J-YGitV4}$*$hg`vv%N)@#UdzHFI2E<&_@0Uw@h_ZHf}7)G;_NUD3@18 zH5;EtugNT0*RXVK*by>WS>jaDDfe!A61Da=VpIK?mcp^W?!1S2oah^wowRnrYjl~`lgP-mv$?yb6{{S55CCu{R z$9;`dyf0Y>uM1=XSl_$01Lc1Iy68IosWN8Q9Op=~I(F<0+_kKfgC*JggjxNgK6 z-3gQm6;sm?J&;bYe&(dx4BEjvq}b`OT^RqF$J4enP1YkeBK#>l1@-K`ajbn05`0J?0daOtnzh@l3^=BkedW1EahZlRp;`j*CaT;-21&f2wU z+Nh-gc4I36Cw+;3UAc<%ySb`#+c@5y ze~en&bYV|kn?Cn|@fqmGxgfz}U!98$=drjAkMi`43I4R%&H0GKEgx-=7PF}y`+j>r zg&JF`jomnu2G{%QV~Gf_-1gx<3Ky=Md9Q3VnK=;;u0lyTBCuf^aUi?+1+`4lLE6ZK zT#(Bf`5rmr(tgTbIt?yA@y`(Ar=f>-aZ}T~>G32EM%XyFvhn&@PWCm#-<&ApLDCXT zD#(9m|V(OOo7PmE@`vD4$S5;+9IQm19dd zvMEU`)E1_F+0o0-z>YCWqg0u8ciIknU#{q02{~YX)gc_u;8;i233D66pf(IkTDxeN zL=4z2)?S$TV9=ORVr&AkZMl<4tTh(v;Ix1{`pPVqI3n2ci&4Dg+W|N8TBUfZ*WeLF zqCH_1Q0W&f9T$lx3CFJ$o@Lz$99 zW!G&@zFHxTaP!o#z^~xgF|(vrHz8R_r9eo;TX9}2ZyjslrtH=%6O)?1?cL&BT(Amp zTGFU1%%#xl&6sH-UIJk_PGk_McFn7=%yd6tAjm|lnmr8bE2le3I~L{0(ffo}TQjyo zHZZI{-}{E4ohYTlZaS$blB!h$Jq^Rf#(ch}@S+Ww&$b);8+>g84IJcLU%B-W?+IY& zslcZIR>+U4v3O9RFEW;8NpCM0w1ROG84=WpKxQ^R`{=0MZCubg3st z48AyJNEvyxn-jCPTlTwp4EKvyEwD3e%kpdY?^BH0!3n6Eb57_L%J1=a*3>|k68A}v zaW`*4YitylfD}ua8V)vb79)N_Ixw_mpp}yJGbNu+5YYOP9K-7nf*jA1#<^rb4#AcS zKg%zCI)7cotx}L&J8Bqo8O1b0q;B1J#B5N5Z$Zq=wX~nQFgUfAE{@u0+EnmK{1hg> zC{vMfFLD;L8b4L+B51&LCm|scVLPe6h02rws@kGv@R+#IqE8>Xn8i|vRq_Z`V;x6F zNeot$1Zsu`lLS92QlLWF54za6vOEKGYQMdX($0JN*cjG7HP&qZ#3+bEN$8O_PfeAb z0R5;=zXac2IZ?fxu59?Nka;1lKm|;0)6|#RxkD05P5qz;*AL@ig!+f=lW5^Jbag%2 z%9@iM0ph$WFlxS!`p31t92z~TB}P-*CS+1Oo_g;7`6k(Jyj8m8U|Q3Sh7o-Icp4kV zK}%qri5>?%IPfamXIZ8pXbm-#{ytiam<{a5A+3dVP^xz!Pvirsq7Btv?*d7eYgx7q zWFxrzb3-%^lDgMc=Vl7^={=VDEKabTG?VWqOngE`Kt7hs236QKidsoeeUQ_^FzsXjprCDd@pW25rNx#6x&L6ZEpoX9Ffzv@olnH3rGOSW( zG-D|cV0Q~qJ>-L}NIyT?T-+x+wU%;+_GY{>t(l9dI%Ximm+Kmwhee;FK$%{dnF;C% zFjM2&$W68Sz#d*wtfX?*WIOXwT;P6NUw}IHdk|)fw*YnGa0rHx#paG!m=Y6GkS4VX zX`T$4eW9k1W!=q8!(#8A9h67fw))k_G)Q9~Q1e3f`aV@kbcSv7!priDUN}gX(iXTy zr$|kU0Vn%*ylmyDCO&G0Z3g>%JeEPFAW!5*H2Ydl>39w3W+gEUjL&vrRs(xGP{(ze zy7EMWF14@Qh>X>st8_029||TP0>7SG9on_xxeR2Iam3G~Em$}aGsNt$iES9zFa<3W zxtOF*!G@=PhfHO!=9pVPXMUVi30WmkPoy$02w}&6A7mF)G6-`~EVq5CwD2`9Zu`kd)52``#V zNSb`9dG~8(dooi1*-aSMf!fun7Sc`-C$-E(3BoSC$2kKrVcI!&yC*+ff2+C-@!AT_ zsvlAIV+%bRDfd{R*TMF><1&_a%@yZ0G0lg2K;F>7b+7A6pv3-S7qWIgx+Z?dt8}|S z>Qbb6x(+^aoV7FQ!Ph8|RUA6vXWQH*1$GJC+wXLXizNIc9p2yLzw9 z0=MdQ!{NnOwIICJc8!+Jp!zG}**r#E!<}&Te&}|B4q;U57$+pQI^}{qj669zMMe_I z&z0uUCqG%YwtUc8HVN7?0GHpu=bL7&{C>hcd5d(iFV{I5c~jpX&!(a{yS*4MEoYXh z*X4|Y@RVfn;piRm-C%b@{0R;aXrjBtvx^HO;6(>i*RnoG0Rtcd25BT6edxTNOgUAOjn zJ2)l{ipj8IP$KID2}*#F=M%^n&=bA0tY98@+2I+7~A&T-tw%W#3GV>GTmkHaqftl)#+E zMU*P(Rjo>8%P@_@#UNq(_L{}j(&-@1iY0TRizhiATJrnvwSH0v>lYfCI2ex^><3$q znzZgpW0JlQx?JB#0^^s-Js1}}wKh6f>(e%NrMwS`Q(FhazkZb|uyB@d%_9)_xb$6T zS*#-Bn)9gmobhAtvBmL+9H-+0_0US?g6^TOvE8f3v=z3o%NcPjOaf{5EMRnn(_z8- z$|m0D$FTU zDy;21v-#0i)9%_bZ7eo6B9@Q@&XprR&oKl4m>zIj-fiRy4Dqy@VVVs?rscG| zmzaDQ%>AQTi<^vYCmv#KOTd@l7#2VIpsj?nm_WfRZzJako`^uU%Nt3e;cU*y*|$7W zLm%fX#i_*HoUXu!NI$ey>BA<5HQB=|nRAwK!$L#n-Qz;~`zACig0PhAq#^5QS<8L2 zS3A+8%vbVMa7LOtTEM?55apt(DcWh#L}R^P2AY*c8B}Cx=6OFAdMPj1f>k3#^#+Hk z6uW1WJW&RlBRh*1DLb7mJ+KO>!t^t8hX1#_Wk`gjDio9)9IGbyCAGI4DJ~orK+YRv znjxRMtshZQHc$#Y-<-JOV6g^Cr@odj&Xw5B(FmI)*qJ9NHmIz_r{t)TxyB`L-%q5l ztzHgD;S6cw?7Atg*6E1!c6*gPRCb%t7D%z<(xm+K{%EJNiI2N0l8ud0Ch@_av_RW? zIr!nO4dL5466WslE6MsfMss7<)-S!e)2@r2o=7_W)OO`~CwklRWzHTfpB)_HYwgz=BzLhgZ9S<{nLBOwOIgJU=94uj6r!m>Xyn9>&xP+=5!zG_*yEoRgM0`aYts z^)&8(>z5C-QQ*o_s(8E4*?AX#S^0)aqB)OTyX>4BMy8h(cHjA8ji1PRlox@jB*1n? 
zDIfyDjzeg91Ao(;Q;KE@zei$}>EnrF6I}q&Xd=~&$WdDsyH0H7fJX|E+O~%LS*7^Q zYzZ4`pBdY{b7u72gZm6^5~O-57HwzwAz{)NvVaowo`X02tL3PpgLjwA`^i9F^vSpN zAqH3mRjG8VeJNHZ(1{%!XqC+)Z%D}58Qel{_weSEHoygT9pN@i zi=G;!Vj6XQk2tuJC>lza%ywz|`f7TIz*EN2Gdt!s199Dr4Tfd_%~fu8gXo~|ogt5Q zlEy_CXEe^BgsYM^o@L?s33WM14}7^T(kqohOX_iN@U?u;$l|rAvn{rwy>!yfZw13U zB@X9)qt&4;(C6dP?yRsoTMI!j-f1KC!<%~i1}u7yLXYn)(#a;Z6~r>hp~kfP));mi zcG%kdaB9H)z9M=H!f>kM->fTjRVOELNwh1amgKQT=I8J66kI)u_?0@$$~5f`u%;zl zC?pkr^p2Fe=J~WK%4ItSzKA+QHqJ@~m|Cduv=Q&-P8I5rQ-#G@bYH}YJr zUS(~(w|vKyU(T(*py}jTUp%I%{2!W!K(i$uvotcPjVddW z8_5HKY!oBCwGZcs-q`4Yt`Zk~>K?mcxg51wkZlX5e#B08I75F7#dgn5yf&Hrp`*%$ zQ;_Qg>TYRzBe$x=T(@WI9SC!ReSas9vDm(yslQjBJZde5z8GDU``r|N(MHcxNopGr z_}u39W_zwWDL*XYYt>#Xo!9kL#97|EAGyGBcRXtLTd59x%m=3i zL^9joWYA)HfL15l9%H?q`$mY27!<9$7GH(kxb%MV>`}hR4a?+*LH6aR{dzrX@?6X4 z3e`9L;cjqYb`cJmophbm(OX0b)!AFG?5`c#zLagzMW~o)?-!@e80lvk!p#&CD8u5_r&wp4O0zQ>y!k5U$h_K;rWGk=U)zX!#@Q%|9g*A zWx)qS1?fq6X<$mQTB$#3g;;5tHOYuAh;YKSBz%il3Ui6fPRv#v62SsrCdMRTav)Sg zTq1WOu&@v$Ey;@^+_!)cf|w_X<@RC>!=~+A1-65O0bOFYiH-)abINwZvFB;hJjL_$ z(9iScmUdMp2O$WW!520Hd0Q^Yj?DK%YgJD^ez$Z^?@9@Ab-=KgW@n8nC&88)TDC+E zlJM)L3r+ZJfZW_T$;Imq*#2<(j+FIk8ls7)WJ6CjUu#r5PoXxQs4b)mZza<8=v{o)VlLRM<9yw^0En#tXAj`Sylxvki{<1DPe^ zhjHwx^;c8tb?Vr$6ZB;$Ff$+3(*oinbwpN-#F)bTsXq@Sm?43MC#jQ~`F|twI=7oC zH4TJtu#;ngRA|Y~w5N=UfMZi?s0%ZmKUFTAye&6Y*y-%c1oD3yQ%IF2q2385Zl+=> zfz=o`Bedy|U;oxbyb^rB9ixG{Gb-{h$U0hVe`J;{ql!s_OJ_>>eoQn(G6h7+b^P48 zG<=Wg2;xGD-+d@UMZ!c;0>#3nws$9kIDkK13IfloGT@s14AY>&>>^#>`PT7GV$2Hp zN<{bN*ztlZu_%W=&3+=#3bE(mka6VoHEs~0BjZ$+=0`a@R$iaW)6>wp2w)=v2@|2d z%?34!+iOc5S@;AAC4hELWLH56RGxo4jw8MDMU0Wk2k_G}=Vo(>eRFo(g3@HjG|`H3 zm8b*dK=moM*oB<)*A$M9!!5o~4U``e)wxavm@O_R(`P|u%9^LGi(_%IF<6o;NLp*0 zKsfZ0#24GT8(G`i4UvoMh$^;kOhl?`0yNiyrC#HJH=tqOH^T_d<2Z+ zeN>Y9Zn!X4*DMCK^o75Zk2621bdmV7Rx@AX^alBG4%~;G_vUoxhfhFRlR&+3WwF^T zaL)8xPq|wCZoNT^>3J0K?e{J-kl+hu2rZI>CUv#-z&u@`hjeb+bBZ>bcciQVZ{SbW zez04s9oFEgc8Z+Kp{XFX`MVf-s&w9*dx7wLen(_@y34}Qz@&`$2+osqfxz4&d}{Ql z*g1ag00Gu+$C`0avds{Q65BfGsu9`_`dML*rX~hyWIe$T>CsPRoLIr%MTk3pJ^2zH1qub1MBzPG}PO;Wmav9w%F7?%l=xIf#LlP`! z_Nw;xBQY9anH5-c8A4mME}?{iewjz(Sq-29r{fV;Fc>fv%0!W@(+{={Xl-sJ6aMoc z)9Q+$bchoTGTyWU_oI19!)bD=IG&OImfy;VxNXoIO2hYEfO~MkE#IXTK(~?Z&!ae! 
zl8z{D&2PC$Q*OBC(rS~-*-GHNJ6AC$@eve>LB@Iq;jbBZj`wk4|LGogE||Ie=M5g= z9d`uYQ1^Sr_q2wmZE>w2WG)!F%^KiqyaDtIAct?}D~JP4shTJy5Bg+-(EA8aXaxbd~BKMtTf2iQ69jD1o* zZF9*S3!v-TdqwK$%&?91Sh2=e63;X0Lci@n7y3XOu2ofyL9^-I767eHESAq{m+@*r zbVDx!FQ|AjT;!bYsXv8ilQjy~Chiu&HNhFXt3R_6kMC8~ChEFqG@MWu#1Q1#=~#ix zrkHpJre_?#r=N0wv`-7cHHqU`phJX2M_^{H0~{VP79Dv{6YP)oA1&TSfKPEPZn2)G z9o{U1huZBLL;Tp_0OYw@+9z(jkrwIGdUrOhKJUbwy?WBt zlIK)*K0lQCY0qZ!$%1?3A#-S70F#YyUnmJF*`xx?aH5;gE5pe-15w)EB#nuf6B*c~ z8Z25NtY%6Wlb)bUA$w%HKs5$!Z*W?YKV-lE0@w^{4vw;J>=rn?u!rv$&eM+rpU6rc=j9>N2Op+C{D^mospMCjF2ZGhe4eADA#skp2EA26%p3Ex9wHW8l&Y@HX z$Qv)mHM}4*@M*#*ll5^hE9M^=q~eyWEai*P;4z<9ZYy!SlNE5nlc7gm;M&Q zKhKE4d*%A>^m0R?{N}y|i6i^k>^n4(wzKvlQeHq{l&JuFD~sTsdhs`(?lFK@Q{pU~ zb!M3c@*3IwN1RUOVjY5>uT+s-2QLWY z4T2>fiSn>>Fob+%B868-v9D@AfWr#M8eM6w#eAlhc#zk6jkLxGBGk`E3$!A@*am!R zy>29&ptYK6>cvP`b!syNp)Q$0UOW|-O@)8!?94GOYF_}+zlW%fCEl|Tep_zx05g6q z>tp47e-&R*hSNe{6{H!mL?+j$c^TXT{C&@T-xIaesNCl05 z9SLb@q&mSb)I{VXMaiWa3PWj=Ed!>*GwUe;^|uk=Pz$njNnfFY^MM>E?zqhf6^{}0 zx&~~dA5#}1ig~7HvOQ#;d9JZBeEQ+}-~v$at`m!(ai z$w(H&mWCC~;PQ1$%iuz3`>dWeb3_p}X>L2LK%2l59Tyc}4m0>9A!8rhoU3m>i2+hl zx?*qs*c^j}+WPs>&v1%1Ko8_ivAGIn@QK7A`hDz-Emkcgv2@wTbYhkiwX2l=xz*XG zaiNg+j4F-I>9v+LjosI-QECrtKjp&0T@xIMKVr+&)gyb4@b3y?2CA?=ooN zT#;rU86WLh(e@#mF*rk(NV-qSIZyr z$6!ZUmzD)%yO-ot`rw3rp6?*_l*@Z*IB0xn4|BGPWHNc-1ZUnNSMWmDh=EzWJRP`) zl%d%J613oXzh5;VY^XWJi{lB`f#u+ThvtP7 zq(HK<4>tw(=yzSBWtYO}XI`S1pMBe3!jFxBHIuwJ(@%zdQFi1Q_hU2eDuHqXte7Ki zOV55H2D6u#4oTfr7|u*3p75KF&jaLEDpxk!4*bhPc%mpfj)Us3XIG3 zIKMX^s^1wt8YK7Ky^UOG=w!o5e7W-<&c|fw2{;Q11vm@J{)@N3-p1U>!0~sKWHaL= zWV(0}1IIyt1p%=_-Fe5Kfzc71wg}`RDDntVZv;4!=&XXF-$48jS0Sc;eDy@Sg;+{A zFStc{dXT}kcIjMXb4F7MbX~2%i;UrBxm%qmLKb|2=?uPr00-$MEUIGR5+JG2l2Nq` zkM{{1RO_R)+8oQ6x&-^kCj)W8Z}TJjS*Wm4>hf+4#VJP)OBaDF%3pms7DclusBUw} z{ND#!*I6h85g6DzNvdAmnwWY{&+!KZM4DGzeHI?MR@+~|su0{y-5-nICz_MIT_#FE zm<5f3zlaKq!XyvY3H`9s&T};z!cK}G%;~!rpzk9-6L}4Rg7vXtKFsl}@sT#U#7)x- z7UWue5sa$R>N&b{J61&gvKcKlozH*;OjoDR+elkh|4bJ!_3AZNMOu?n9&|L>OTD78 z^i->ah_Mqc|Ev)KNDzfu1P3grBIM#%`QZqj5W{qu(HocQhjyS;UINoP`{J+DvV?|1 z_sw6Yr3z6%e7JKVDY<$P=M)dbk@~Yw9|2!Cw!io3%j92wTD!c^e9Vj+7VqXo3>u#= zv#M{HHJ=e$X5vQ>>ML?E8#UlmvJgTnb73{PSPTf*0)mcj6C z{KsfUbDK|F$E(k;ER%8HMdDi`=BfpZzP3cl5yJHu;v^o2FkHNk;cXc17tL8T!CsYI zfeZ6sw@;8ia|mY_AXjCS?kUfxdjDB28)~Tz1dGE|{VfBS9`0m2!m1yG?hR})er^pl4c@9Aq+|}ZlDaHL)K$O| z%9Jp-imI-Id0|(d5{v~w6mx)tUKfbuVD`xNt04Mry%M+jXzE>4(TBsx#&=@wT2Vh) z1yeEY&~17>0%P(eHP0HB^|7C+WJxQBTG$uyOWY@iDloRIb-Cf!p<{WQHR!422#F34 zG`v|#CJ^G}y9U*7jgTlD{D&y$Iv{6&PYG>{Ixg$pGk?lWrE#PJ8KunQC@}^6OP!|< zS;}p3to{S|uZz%kKe|;A0bL0XxPB&Q{J(9PyX`+Kr`k~r2}yP^ND{8!v7Q1&vtk& z2Y}l@J@{|2`oA%sxvM9i0V+8IXrZ4;tey)d;LZI70Kbim<4=WoTPZy=Yd|34v#$Kh zx|#YJ8s`J>W&jt#GcMpx84w2Z3ur-rK7gf-p5cE)=w1R2*|0mj12hvapuUWM0b~dG zMg9p8FmAZI@i{q~0@QuY44&mMUNXd7z>U58shA3o`p5eVLpq>+{(<3->DWuSFVZwC zxd50Uz(w~LxC4}bgag#q#NNokK@yNc+Q|Ap!u>Ddy+df>v;j@I12CDNN9do+0^n8p zMQs7X#+FVF0C5muGfN{r0|Nkql%BQT|K(DDNdR2pzM=_ea5+GO|J67`05AV92t@4l z0Qno0078PIHdaQGHZ~Scw!dzgqjK~3B7kf>BcP__&lLyU(cu3B^uLo%{j|Mb0NR)tkeT7Hcwp4O# z)yzu>cvG(d9~0a^)eZ;;%3ksk@F&1eEBje~ zW+-_s)&RgiweQc!otF>4%vbXKaOU41{!hw?|2`Ld3I8$&#WOsq>EG)1ANb!{N4z9@ zsU!bPG-~-bqCeIDzo^Q;gnucB{tRzm{ZH^Orphm2U+REA!*<*J6YQV83@&xoDl%#wnl5qcBqCcAF-vX5{30}(oJrnSH z{RY85hylK2dMOh2%oO1J8%)0?8TOL%rS8)+CsDv}aQ>4D)Jv+DLK)9gI^n-T^$)Tc zFPUD75qJm!Y-KBqj;JP4dV4 z`X{lGmn<)1IGz330}s}Jrjtf{(lnuuNHe5(ezA(pYa=1|Ff-LhPFK8 zyJh_b{yzu0yll6ZkpRzRjezyYivjyjW7QwO;@6X`m;2Apn2EK2!~7S}-*=;5*7K$B z`x(=!^?zgj(-`&ApZJXI09aDLXaT@<;CH=?fBOY5d|b~wBA@@p^K#nxr`)?i?SqTupI_PJ(A3cx`z~9mX_*)>L F{|7XC?P&l2 diff --git 
a/android/material-showcase/gradle/wrapper/gradle-wrapper.properties b/android/material-showcase/gradle/wrapper/gradle-wrapper.properties index 7f975cbe9d..ffed3a254e 100644 --- a/android/material-showcase/gradle/wrapper/gradle-wrapper.properties +++ b/android/material-showcase/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Sun Jan 31 23:07:35 PST 2021 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip diff --git a/android/material-showcase/gradlew b/android/material-showcase/gradlew index cccdd3d517..1b6c787337 100755 --- a/android/material-showcase/gradlew +++ b/android/material-showcase/gradlew @@ -1,78 +1,129 @@ -#!/usr/bin/env sh +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. 
+# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -81,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -89,84 +140,95 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! 
"$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=$((i+1)) + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. 
+ shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=$(save "$@") - -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" - -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' exec "$JAVACMD" "$@" diff --git a/android/material-showcase/gradlew.bat b/android/material-showcase/gradlew.bat index e95643d6a2..ac1b06f938 100644 --- a/android/material-showcase/gradlew.bat +++ b/android/material-showcase/gradlew.bat @@ -1,3 +1,19 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @@ -13,15 +29,18 @@ if "%DIRNAME%" == "" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if "%ERRORLEVEL%" == "0" goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -35,7 +54,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -45,28 +64,14 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell From e2498556d04cb52e166d43c28ac8f779673e891b Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Sun, 30 Jul 2023 14:40:49 +0530 Subject: [PATCH 05/18] MLImage class utilized for processing image frames. 
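Note: the patch below replaces the raw android.media.Image (plus a separate
rotation argument) with ML Kit's MlImage wrapper, so the rotation travels with
the frame. A minimal sketch of the conversion the diff performs; the helper
name toMlImage is illustrative, the builder API is the one used in the diff:

    import android.media.Image
    import com.google.android.odml.image.MediaMlImageBuilder
    import com.google.android.odml.image.MlImage

    // Wrap a Camera2 frame in an MlImage so its rotation is carried with the
    // frame instead of being passed around as a second argument.
    fun toMlImage(frame: Image, rotationDegrees: Int): MlImage =
        MediaMlImageBuilder(frame)
            .setRotation(rotationDegrees)
            .build()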
--- .../md/barcodedetection/Barcode2Processor.kt | 3 +- .../google/mlkit/md/camera/Camera2Source.kt | 18 +++++++--- .../mlkit/md/camera/Camera2SourcePreview.kt | 4 +-- .../google/mlkit/md/camera/Frame2Processor.kt | 3 +- .../mlkit/md/camera/Frame2ProcessorBase.kt | 36 ++++++++----------- .../google/mlkit/md/camera/GraphicOverlay.kt | 6 ++-- 6 files changed, 37 insertions(+), 33 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt index c797fac4e6..d718fbbe5b 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt @@ -20,6 +20,7 @@ import android.animation.ValueAnimator import android.util.Log import androidx.annotation.MainThread import com.google.android.gms.tasks.Task +import com.google.android.odml.image.MlImage import com.google.mlkit.md.InputInfo import com.google.mlkit.md.camera.* import com.google.mlkit.md.camera.WorkflowModel.WorkflowState @@ -36,7 +37,7 @@ class Barcode2Processor(graphicOverlay: GraphicOverlay, private val workflowMode private val scanner = BarcodeScanning.getClient() private val cameraReticleAnimator: CameraReticleAnimator = CameraReticleAnimator(graphicOverlay) - override fun detectInImage(image: InputImage): Task> = + override fun detectInImage(image: MlImage): Task> = scanner.process(image) @MainThread diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt index 259e84a9e2..a2ea95d766 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt @@ -30,12 +30,15 @@ import android.util.Size import android.view.Surface import android.view.SurfaceHolder import androidx.core.app.ActivityCompat +import com.google.android.odml.image.MediaMlImageBuilder +import com.google.android.odml.image.MlImage import com.google.mlkit.md.R import com.google.mlkit.md.Utils import com.google.mlkit.md.settings.PreferenceUtils import com.google.mlkit.md.utils.OrientationLiveData import com.google.mlkit.md.utils.computeExifOrientation import java.io.IOException +import java.lang.IllegalStateException import java.util.* import kotlin.math.abs @@ -166,7 +169,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { session = cameraCaptureSession captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply { addTarget(surfaceHolder.surface) - //addTarget(imageReader.surface) + addTarget(imageReader.surface) startPreview( this, imageReader, cameraCaptureSession) callback.onSuccess() } @@ -483,7 +486,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { * FPS setting above to allow for some idle time in between frames. */ override fun run() { - var data: Image? + var data: MlImage? while (true) { synchronized(lock) { @@ -507,22 +510,27 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { // Hold onto the frame data locally, so that we can use this for detection // below. We need to clear pendingFrameData to ensure that this buffer isn't // recycled back to the camera before we are done using that data. 
- data = pendingFrame + data = pendingFrame?.let { + MediaMlImageBuilder(it) + .setRotation(pendingFrameRotation) + .build() + } pendingFrame = null + } try { synchronized(processorLock) { data?.let { //Log.d(TAG, "Processing Next Frame ${it.width} x ${it.height}") - frameProcessor?.process(it, pendingFrameRotation, graphicOverlay) + frameProcessor?.process(it, graphicOverlay) } } } catch (t: Exception) { Log.e(TAG, "Exception thrown from receiver.", t) } finally { //Let the processor close image as it's required until frame is processed - //data?.close() + data?.close() } } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt index f0ec2dc6ce..c9579b9ab4 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt @@ -70,13 +70,13 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout( cameraSource?.start(surfaceView.holder, object : CameraStartCallback{ override fun onSuccess() { post { - /*requestLayout() + requestLayout() graphicOverlay?.let { overlay -> cameraSource?.let { overlay.setCameraInfo(it) } overlay.clear() - }*/ + } startRequested = false startProcessing = false } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt index 80700d7e6b..ecfbaa662e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt @@ -17,13 +17,14 @@ package com.google.mlkit.md.camera import android.media.Image +import com.google.android.odml.image.MlImage import java.nio.ByteBuffer /** An interface to process the input camera frame and perform detection on it. */ interface Frame2Processor { /** Processes the input frame with the underlying detector. */ - fun process(image: Image, rotation: Int, graphicOverlay: GraphicOverlay) + fun process(image: MlImage, graphicOverlay: GraphicOverlay) /** Stops the underlying detector and release resources. */ fun stop() diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt index faf7b231b1..e608ee9111 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt @@ -23,6 +23,8 @@ import androidx.annotation.GuardedBy import com.google.android.gms.tasks.OnFailureListener import com.google.android.gms.tasks.Task import com.google.android.gms.tasks.TaskExecutors +import com.google.android.odml.image.MediaImageExtractor +import com.google.android.odml.image.MlImage import com.google.mlkit.md.* import com.google.mlkit.vision.common.InputImage @@ -31,47 +33,39 @@ abstract class Frame2ProcessorBase : Frame2Processor { // To keep the latest frame and its metadata. @GuardedBy("this") - private var latestFrame: Image? = null - - @GuardedBy("this") - private var latestFrameRotation: Int? = null + private var latestFrame: MlImage? = null // To keep the frame and metadata in process. 
@GuardedBy("this") - private var processingFrame: Image? = null + private var processingFrame: MlImage? = null - @GuardedBy("this") - private var processingFrameRotation: Int? = null private val executor = ScopedExecutor(TaskExecutors.MAIN_THREAD) @Synchronized - override fun process(image: Image, rotation: Int, graphicOverlay: GraphicOverlay) { + override fun process(image: MlImage, graphicOverlay: GraphicOverlay) { latestFrame = image - latestFrameRotation = rotation - if (processingFrame == null && processingFrameRotation == null) { + if (processingFrame == null) { processLatestFrame(graphicOverlay) } } @Synchronized private fun processLatestFrame(graphicOverlay: GraphicOverlay) { - processingFrame?.close() processingFrame = latestFrame - processingFrameRotation = latestFrameRotation latestFrame = null - latestFrameRotation = null val frame = processingFrame ?: return - val frameRotation = processingFrameRotation ?: return - val image = InputImage.fromMediaImage(frame, frameRotation) - val startMs = SystemClock.elapsedRealtime() - detectInImage(image) + //val startMs = SystemClock.elapsedRealtime() + detectInImage(frame) .addOnSuccessListener(executor) { results: T -> //Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}") - this@Frame2ProcessorBase.onSuccess(Camera2InputInfo(frame, frameRotation), results, graphicOverlay) - processLatestFrame(graphicOverlay) + MediaImageExtractor.extract(frame).let { + this@Frame2ProcessorBase.onSuccess(CameraInputInfo(it.planes[0].buffer, FrameMetadata(frame.width, + frame.height,frame.rotation)), results, graphicOverlay) + processLatestFrame(graphicOverlay) + } } .addOnFailureListener(executor) { e -> OnFailureListener { - Log.d(TAG, "Detect In Image Failure: ${e.message}") + //Log.d(TAG, "Detect In Image Failure: ${e.message}") this@Frame2ProcessorBase.onFailure(it) } } } @@ -80,7 +74,7 @@ abstract class Frame2ProcessorBase : Frame2Processor { executor.shutdown() } - protected abstract fun detectInImage(image: InputImage): Task + protected abstract fun detectInImage(image: MlImage): Task /** Be called when the detection succeeds. */ protected abstract fun onSuccess( diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt index 9fbd32c21e..e402c7f363 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt @@ -97,14 +97,14 @@ class GraphicOverlay(context: Context, attrs: AttributeSet) : View(context, attr */ fun setCameraInfo(cameraSource: Camera2Source) { val previewSize = cameraSource.previewSize ?: return - /*if (Utils.isPortraitMode(context)) { + if (Utils.isPortraitMode(context)) { // Swap width and height when in portrait, since camera's natural orientation is landscape. previewWidth = previewSize.height previewHeight = previewSize.width - } else {*/ + } else { previewWidth = previewSize.width previewHeight = previewSize.height - /*}*/ + } } fun translateX(x: Float): Float = x * widthScaleFactor From 31c9857fed73236e3c2ea6b605c59b53ac0fd176 Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Sun, 30 Jul 2023 22:16:33 +0530 Subject: [PATCH 06/18] Fixed the frame buffer leaking issue and some code cleanup. 
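Note: the leak fix below changes process() to return an ownership flag: the
processor keeps at most one detection in flight, closes the frame it accepted
once detection completes, and the camera loop closes any frame the processor
declines. A condensed sketch of that contract, using the names from the diff
(class members elided):

    // Inside Frame2ProcessorBase: accept a frame only when no detection task
    // is still running; the accepted frame is closed on completion.
    @Synchronized
    private fun processLatestFrame(frame: MlImage, graphicOverlay: GraphicOverlay): Boolean {
        if (currentDetectionTask?.isComplete == false) return false // busy; caller closes the frame
        currentDetectionTask = detectInImage(frame).addOnCompleteListener { frame.close() }
        return true
    }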
--- .../md/barcodedetection/Barcode2Processor.kt | 2 +- .../google/mlkit/md/camera/Camera2Source.kt | 21 ++++--- .../google/mlkit/md/camera/Frame2Processor.kt | 2 +- .../mlkit/md/camera/Frame2ProcessorBase.kt | 59 ++++++++----------- 4 files changed, 42 insertions(+), 42 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt index d718fbbe5b..e502f6fc87 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt @@ -103,7 +103,7 @@ class Barcode2Processor(graphicOverlay: GraphicOverlay, private val workflowMode } } - override fun onFailure(e: Exception) { + override fun onFailure(e: Exception?) { Log.e(TAG, "Barcode detection failed!", e) } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt index a2ea95d766..9c790651bf 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt @@ -284,9 +284,6 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { camera = null } - cameraThread.quitSafely() - imageReaderThread.quitSafely() - } /** Stops the camera and releases the resources of the camera and underlying detector. */ @@ -295,6 +292,8 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { synchronized(processorLock) { stop() frameProcessor?.stop() + cameraThread.quitSafely() + imageReaderThread.quitSafely() } } @@ -523,13 +522,21 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { synchronized(processorLock) { data?.let { //Log.d(TAG, "Processing Next Frame ${it.width} x ${it.height}") - frameProcessor?.process(it, graphicOverlay) + if(frameProcessor?.process(it, graphicOverlay) == true){ + //Do nothing as frame processor accepted the image for processing + // and it will close the image once the detection gets completed on it + } + else{ + //Close image immediately because either frame processor is + // not set or it's currently busy processing previous image + it.close() + } } } } catch (t: Exception) { Log.e(TAG, "Exception thrown from receiver.", t) - } finally { - //Let the processor close image as it's required until frame is processed + //precautionary image close request in-case there is an exception occurred + // while submitting the image to the frame processor data?.close() } } @@ -544,7 +551,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { private const val TAG = "CameraSource" /** Maximum number of images that will be held in the reader's buffer */ - private const val IMAGE_BUFFER_SIZE: Int = 4 + private const val IMAGE_BUFFER_SIZE: Int = 3 private const val MIN_CAMERA_PREVIEW_WIDTH = 400 private const val MAX_CAMERA_PREVIEW_WIDTH = 1300 diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt index ecfbaa662e..c44f295cdf 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt +++ 
b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt @@ -24,7 +24,7 @@ import java.nio.ByteBuffer interface Frame2Processor { /** Processes the input frame with the underlying detector. */ - fun process(image: MlImage, graphicOverlay: GraphicOverlay) + fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean /** Stops the underlying detector and release resources. */ fun stop() diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt index e608ee9111..a6b894288c 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt @@ -16,58 +16,51 @@ package com.google.mlkit.md.camera -import android.media.Image -import android.os.SystemClock -import android.util.Log import androidx.annotation.GuardedBy -import com.google.android.gms.tasks.OnFailureListener import com.google.android.gms.tasks.Task import com.google.android.gms.tasks.TaskExecutors import com.google.android.odml.image.MediaImageExtractor import com.google.android.odml.image.MlImage import com.google.mlkit.md.* -import com.google.mlkit.vision.common.InputImage /** Abstract base class of [FrameProcessor]. */ abstract class Frame2ProcessorBase : Frame2Processor { - // To keep the latest frame and its metadata. + // To keep the reference of current detection task @GuardedBy("this") - private var latestFrame: MlImage? = null - - // To keep the frame and metadata in process. - @GuardedBy("this") - private var processingFrame: MlImage? = null + private var currentDetectionTask: Task? 
= null private val executor = ScopedExecutor(TaskExecutors.MAIN_THREAD) @Synchronized - override fun process(image: MlImage, graphicOverlay: GraphicOverlay) { - latestFrame = image - if (processingFrame == null) { - processLatestFrame(graphicOverlay) - } + override fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean { + return processLatestFrame(image, graphicOverlay) } @Synchronized - private fun processLatestFrame(graphicOverlay: GraphicOverlay) { - processingFrame = latestFrame - latestFrame = null - val frame = processingFrame ?: return - //val startMs = SystemClock.elapsedRealtime() - detectInImage(frame) - .addOnSuccessListener(executor) { results: T -> - //Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}") - MediaImageExtractor.extract(frame).let { - this@Frame2ProcessorBase.onSuccess(CameraInputInfo(it.planes[0].buffer, FrameMetadata(frame.width, - frame.height,frame.rotation)), results, graphicOverlay) - processLatestFrame(graphicOverlay) + private fun processLatestFrame(frame: MlImage, graphicOverlay: GraphicOverlay): Boolean { + return if(currentDetectionTask?.isComplete == false){ + false + }else { + //val startMs = SystemClock.elapsedRealtime() + currentDetectionTask = detectInImage(frame).addOnCompleteListener(executor) { task -> + if (task.isSuccessful){ + //Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}") + MediaImageExtractor.extract(frame).let { + this@Frame2ProcessorBase.onSuccess(CameraInputInfo(it.planes[0].buffer, FrameMetadata(frame.width, + frame.height,frame.rotation)), task.result, graphicOverlay) + } } + else{ + //Log.d(TAG, "Detect In Image Failure: ${e.message}") + this@Frame2ProcessorBase.onFailure(task.exception) + } + + //Close the processing frame + frame.close() } - .addOnFailureListener(executor) { e -> OnFailureListener { - //Log.d(TAG, "Detect In Image Failure: ${e.message}") - this@Frame2ProcessorBase.onFailure(it) } - } + return true + } } override fun stop() { @@ -83,7 +76,7 @@ abstract class Frame2ProcessorBase : Frame2Processor { graphicOverlay: GraphicOverlay ) - protected abstract fun onFailure(e: Exception) + protected abstract fun onFailure(e: Exception?) companion object { private const val TAG = "FrameProcessorBase" From c573810bb58236b97a7f48ea98bcf97cd8ff5aaa Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Tue, 1 Aug 2023 14:21:31 +0530 Subject: [PATCH 07/18] Updated the preferences to use Camera 2 API for resolving resolutions. 
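Note: the settings change below drops the deprecated Camera.open() call and
resolves sizes from CameraCharacteristics instead. The lookup it relies on,
reduced to a self-contained sketch (assumes a Context is in scope; throws if
no back-facing camera exists, mirroring the IOException in the diff):

    import android.content.Context
    import android.hardware.camera2.CameraCharacteristics
    import android.hardware.camera2.CameraManager

    // Find the back-facing camera; preview/picture sizes are then read from
    // its SCALER_STREAM_CONFIGURATION_MAP.
    fun backCameraCharacteristics(context: Context): CameraCharacteristics {
        val manager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
        val id = manager.cameraIdList.first { cameraId ->
            manager.getCameraCharacteristics(cameraId)
                .get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK
        }
        return manager.getCameraCharacteristics(id)
    }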
--- .../mlkit/md/settings/SettingsFragment.kt | 34 ++++++++++++++++--- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt index ac4b3d1d38..a93598ddf5 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt @@ -16,18 +16,44 @@ package com.google.mlkit.md.settings +import android.content.Context import android.hardware.Camera +import android.hardware.camera2.CameraCharacteristics +import android.hardware.camera2.CameraManager import android.os.Bundle import androidx.preference.ListPreference import androidx.preference.PreferenceFragmentCompat import com.google.mlkit.md.camera.CameraSource import com.google.mlkit.md.R import com.google.mlkit.md.Utils +import com.google.mlkit.md.camera.Camera2Source +import java.io.IOException import java.util.HashMap /** Configures App settings. */ class SettingsFragment : PreferenceFragmentCompat() { + /** Detects, characterizes, and connects to a CameraDevice (used for all camera operations) */ + private val cameraManager: CameraManager by lazy { + requireContext().getSystemService(Context.CAMERA_SERVICE) as CameraManager + } + + /** [cameraId] corresponding to the provided Camera facing back property */ + private val cameraId: String by lazy { + cameraManager.cameraIdList.forEach { + val characteristics = cameraManager.getCameraCharacteristics(it) + if (characteristics.get(CameraCharacteristics.LENS_FACING) == Camera2Source.CAMERA_FACING_BACK){ + return@lazy it + } + } + throw IOException("No Camera found matching the back facing lens ${Camera2Source.CAMERA_FACING_BACK}") + } + + /** [CameraCharacteristics] corresponding to the provided Camera ID */ + private val characteristics: CameraCharacteristics by lazy { + cameraManager.getCameraCharacteristics(cameraId) + } + override fun onCreatePreferences(bundle: Bundle?, rootKey: String?) { setPreferencesFromResource(R.xml.preferences, rootKey) setUpRearCameraPreviewSizePreference() @@ -37,11 +63,11 @@ class SettingsFragment : PreferenceFragmentCompat() { val previewSizePreference = findPreference(getString(R.string.pref_key_rear_camera_preview_size))!! - var camera: Camera? = null + //var camera: Camera? = null try { - camera = Camera.open(CameraSource.CAMERA_FACING_BACK) - val previewSizeList = Utils.generateValidPreviewSizeList(camera!!) + //camera = Camera.open(CameraSource.CAMERA_FACING_BACK) + val previewSizeList = Utils.generateValidPreviewSizeList(characteristics) val previewSizeStringValues = arrayOfNulls(previewSizeList.size) val previewToPictureSizeStringMap = HashMap() for (i in previewSizeList.indices) { @@ -69,7 +95,7 @@ class SettingsFragment : PreferenceFragmentCompat() { // If there's no camera for the given camera id, hide the corresponding preference. previewSizePreference.parent?.removePreference(previewSizePreference) } finally { - camera?.release() + //camera?.release() } } } From 349911393a22407b3e521c544097775cff4c8fac Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Tue, 1 Aug 2023 14:27:41 +0530 Subject: [PATCH 08/18] Logging to debug the aspect ratio issue. 
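Note: the log lines added below instrument the preview-size selection rule
rather than change it. For reference, the rule in a simplified, self-contained
form (width/height pairs here are illustrative; the real code works on
CameraSizePair):

    import kotlin.math.abs

    // Pick the candidate whose aspect ratio is closest to the display's,
    // preferring the wider one when two candidates fall within the tolerance.
    fun selectClosest(
        candidates: List<Pair<Int, Int>>,
        displayRatio: Float,
        tolerance: Float = 0.01f
    ): Pair<Int, Int>? {
        var selected: Pair<Int, Int>? = null
        var minDiff = Float.MAX_VALUE
        for (candidate in candidates) {
            val (width, height) = candidate
            val diff = abs(displayRatio - width.toFloat() / height)
            if (abs(diff - minDiff) < tolerance) {
                if (selected == null || selected.first < width) selected = candidate
            } else if (diff < minDiff) {
                minDiff = diff
                selected = candidate
            }
        }
        return selected
    }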
--- .../java/com/google/mlkit/md/camera/Camera2Source.kt | 10 ++++++++-- .../com/google/mlkit/md/camera/Camera2SourcePreview.kt | 6 ++++++ .../java/com/google/mlkit/md/camera/CameraSource.kt | 9 +++++++-- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt index 9c790651bf..ec085742be 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt @@ -548,7 +548,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { const val CAMERA_FACING_BACK = CameraCharacteristics.LENS_FACING_BACK const val IMAGE_FORMAT = ImageFormat.YUV_420_888 - private const val TAG = "CameraSource" + private const val TAG = "Camera2Source" /** Maximum number of images that will be held in the reader's buffer */ private const val IMAGE_BUFFER_SIZE: Int = 3 @@ -586,25 +586,31 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { var selectedPair: CameraSizePair? = null // Picks the preview size that has closest aspect ratio to display view. var minAspectRatioDiff = Float.MAX_VALUE - + Log.d(TAG, "Display Aspect Ratio: $displayAspectRatioInLandscape") for (sizePair in validPreviewSizes) { val previewSize = sizePair.preview + Log.d(TAG, "Preview Size: $previewSize") if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { continue } val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) + Log.d(TAG, "Preview Aspect Ratio: $previewAspectRatio Aspect Ratio Diff: $aspectRatioDiff Min Aspect Ratio: $minAspectRatioDiff") if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { selectedPair = sizePair + Log.d(TAG, "Selected Pair with Minimum Tolerance: ${selectedPair.preview} ${selectedPair.picture}") } } else if (aspectRatioDiff < minAspectRatioDiff) { minAspectRatioDiff = aspectRatioDiff selectedPair = sizePair + Log.d(TAG, "Selected Pair with Minimum Aspect ratio difference: ${selectedPair.preview} ${selectedPair.picture}") } } + Log.d(TAG, "Final Selected Pair: ${selectedPair?.preview} ${selectedPair?.picture}") + if (selectedPair == null) { // Picks the one that has the minimum sum of the differences between the desired values and // the actual values for width and height. diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt index c9579b9ab4..9520274e77 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt @@ -110,6 +110,12 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout( // Match the width of the child view to its parent. 
val childHeight = (layoutWidth / previewSizeRatio).toInt() + + Log.d(TAG, "Layout: $layoutWidth x $layoutHeight") + Log.d(TAG, "Camera Preview: $cameraPreviewSize") + Log.d(TAG, "Child Height: $childHeight") + + if (childHeight <= layoutHeight) { for (i in 0 until childCount) { getChildAt(i).layout(0, 0, layoutWidth, childHeight) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt index 5444268654..c8fc71f695 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt @@ -456,25 +456,30 @@ class CameraSource(private val graphicOverlay: GraphicOverlay) { var selectedPair: CameraSizePair? = null // Picks the preview size that has closest aspect ratio to display view. var minAspectRatioDiff = Float.MAX_VALUE - + Log.d(TAG, "Display Aspect Ratio: $displayAspectRatioInLandscape") for (sizePair in validPreviewSizes) { val previewSize = sizePair.preview + Log.d(TAG, "Preview Size: $previewSize") if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { continue } - val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) + Log.d(TAG, "Preview Aspect Ratio: $previewAspectRatio Aspect Ratio Diff: $aspectRatioDiff Min Aspect Ratio: $minAspectRatioDiff") if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { selectedPair = sizePair + Log.d(TAG, "Selected Pair with Minimum Tolerance: ${selectedPair.preview} ${selectedPair.picture}") } } else if (aspectRatioDiff < minAspectRatioDiff) { minAspectRatioDiff = aspectRatioDiff selectedPair = sizePair + Log.d(TAG, "Selected Pair with Minimum Aspect ratio difference: ${selectedPair.preview} ${selectedPair.picture}") } } + Log.d(TAG, "Final Selected Pair: ${selectedPair?.preview} ${selectedPair?.picture}") + if (selectedPair == null) { // Picks the one that has the minimum sum of the differences between the desired values and // the actual values for width and height. From 7d9d984e66fd6cdabd54a95fedaccd8021d2155d Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Thu, 10 Aug 2023 22:12:40 +0530 Subject: [PATCH 09/18] Revert "Logging to debug the aspect ratio issue." This reverts commit 349911393a22407b3e521c544097775cff4c8fac. 
--- .../java/com/google/mlkit/md/camera/Camera2Source.kt | 10 ++-------- .../com/google/mlkit/md/camera/Camera2SourcePreview.kt | 6 ------ .../java/com/google/mlkit/md/camera/CameraSource.kt | 9 ++------- 3 files changed, 4 insertions(+), 21 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt index ec085742be..9c790651bf 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt @@ -548,7 +548,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { const val CAMERA_FACING_BACK = CameraCharacteristics.LENS_FACING_BACK const val IMAGE_FORMAT = ImageFormat.YUV_420_888 - private const val TAG = "Camera2Source" + private const val TAG = "CameraSource" /** Maximum number of images that will be held in the reader's buffer */ private const val IMAGE_BUFFER_SIZE: Int = 3 @@ -586,31 +586,25 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { var selectedPair: CameraSizePair? = null // Picks the preview size that has closest aspect ratio to display view. var minAspectRatioDiff = Float.MAX_VALUE - Log.d(TAG, "Display Aspect Ratio: $displayAspectRatioInLandscape") + for (sizePair in validPreviewSizes) { val previewSize = sizePair.preview - Log.d(TAG, "Preview Size: $previewSize") if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { continue } val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) - Log.d(TAG, "Preview Aspect Ratio: $previewAspectRatio Aspect Ratio Diff: $aspectRatioDiff Min Aspect Ratio: $minAspectRatioDiff") if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { selectedPair = sizePair - Log.d(TAG, "Selected Pair with Minimum Tolerance: ${selectedPair.preview} ${selectedPair.picture}") } } else if (aspectRatioDiff < minAspectRatioDiff) { minAspectRatioDiff = aspectRatioDiff selectedPair = sizePair - Log.d(TAG, "Selected Pair with Minimum Aspect ratio difference: ${selectedPair.preview} ${selectedPair.picture}") } } - Log.d(TAG, "Final Selected Pair: ${selectedPair?.preview} ${selectedPair?.picture}") - if (selectedPair == null) { // Picks the one that has the minimum sum of the differences between the desired values and // the actual values for width and height. diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt index 9520274e77..c9579b9ab4 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt @@ -110,12 +110,6 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout( // Match the width of the child view to its parent. 
val childHeight = (layoutWidth / previewSizeRatio).toInt() - - Log.d(TAG, "Layout: $layoutWidth x $layoutHeight") - Log.d(TAG, "Camera Preview: $cameraPreviewSize") - Log.d(TAG, "Child Height: $childHeight") - - if (childHeight <= layoutHeight) { for (i in 0 until childCount) { getChildAt(i).layout(0, 0, layoutWidth, childHeight) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt index c8fc71f695..5444268654 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt @@ -456,30 +456,25 @@ class CameraSource(private val graphicOverlay: GraphicOverlay) { var selectedPair: CameraSizePair? = null // Picks the preview size that has closest aspect ratio to display view. var minAspectRatioDiff = Float.MAX_VALUE - Log.d(TAG, "Display Aspect Ratio: $displayAspectRatioInLandscape") + for (sizePair in validPreviewSizes) { val previewSize = sizePair.preview - Log.d(TAG, "Preview Size: $previewSize") if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { continue } + val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) - Log.d(TAG, "Preview Aspect Ratio: $previewAspectRatio Aspect Ratio Diff: $aspectRatioDiff Min Aspect Ratio: $minAspectRatioDiff") if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { selectedPair = sizePair - Log.d(TAG, "Selected Pair with Minimum Tolerance: ${selectedPair.preview} ${selectedPair.picture}") } } else if (aspectRatioDiff < minAspectRatioDiff) { minAspectRatioDiff = aspectRatioDiff selectedPair = sizePair - Log.d(TAG, "Selected Pair with Minimum Aspect ratio difference: ${selectedPair.preview} ${selectedPair.picture}") } } - Log.d(TAG, "Final Selected Pair: ${selectedPair?.preview} ${selectedPair?.picture}") - if (selectedPair == null) { // Picks the one that has the minimum sum of the differences between the desired values and // the actual values for width and height. From 4499902c78997833fad4f9154ecd86e4d91770a9 Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Thu, 10 Aug 2023 22:23:37 +0530 Subject: [PATCH 10/18] Fixed the camera preview appearance size by applying center crop scaling. --- .../mlkit/md/camera/Camera2SourcePreview.kt | 52 +++++++++---------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt index c9579b9ab4..6becc8382f 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt @@ -25,7 +25,8 @@ import android.view.SurfaceView import android.widget.FrameLayout import com.google.mlkit.md.R import com.google.mlkit.md.Utils -import java.io.IOException +import kotlin.math.abs +import kotlin.math.roundToInt /** Preview the camera image in the screen. 
*/ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) { @@ -106,32 +107,31 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout( } else { size.width.toFloat() / size.height } - } ?: layoutWidth.toFloat() / layoutHeight.toFloat() - - // Match the width of the child view to its parent. - val childHeight = (layoutWidth / previewSizeRatio).toInt() - if (childHeight <= layoutHeight) { - for (i in 0 until childCount) { - getChildAt(i).layout(0, 0, layoutWidth, childHeight) - } + } ?: (layoutWidth.toFloat() / layoutHeight.toFloat()) + + //Calculate the new surface view size by scaling the layout width/height based on aspect ratio + val newLayoutWidth: Int + val newLayoutHeight: Int + if (width < height * previewSizeRatio) { + newLayoutHeight = height + newLayoutWidth = (height * previewSizeRatio).roundToInt() } else { - // When the child view is too tall to be fitted in its parent: If the child view is - // static overlay view container (contains views such as bottom prompt chip), we apply - // the size of the parent view to it. Otherwise, we offset the top/bottom position - // equally to position it in the center of the parent. - val excessLenInHalf = (childHeight - layoutHeight) / 2 - for (i in 0 until childCount) { - val childView = getChildAt(i) - when (childView.id) { - R.id.static_overlay_container -> { - childView.layout(0, 0, layoutWidth, layoutHeight) - } - else -> { - childView.layout( - 0, -excessLenInHalf, layoutWidth, layoutHeight + excessLenInHalf - ) - } - } + newLayoutWidth = width + newLayoutHeight = (width / previewSizeRatio).roundToInt() + } + + //Apply the new width & height to surface view only in a way that it should center crop the camera preview + val excessWidthInHalf = abs(newLayoutWidth - layoutWidth) / 2 + val excessHeightInHalf = abs(newLayoutHeight - layoutHeight) / 2 + surfaceView.layout( + -excessWidthInHalf, -excessHeightInHalf, newLayoutWidth, newLayoutHeight + ) + + //Apply the actual layout width & height to rest of its child views + for (i in 0 until childCount) { + val childView = getChildAt(i) + if (!childView.equals(surfaceView)){ + childView.layout(0, 0, layoutWidth, layoutHeight) } } From 8bdcb05780d5a84453ad6cbc09b3889424a6ffc2 Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Wed, 16 Aug 2023 18:20:37 +0530 Subject: [PATCH 11/18] Fixed the preview issue (happening when opening & closing camera preview multiple times) by using coroutine blocking mechanism. 
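Note: the race fix below serializes start() and stop() behind a Mutex and
converts the callback-based camera open into a suspending call. A reduced,
self-contained sketch of the openCamera wrapper the diff introduces (error
messages abbreviated; the permission check is elided here):

    import android.annotation.SuppressLint
    import android.hardware.camera2.CameraDevice
    import android.hardware.camera2.CameraManager
    import android.os.Handler
    import java.io.IOException
    import kotlin.coroutines.resume
    import kotlin.coroutines.resumeWithException
    import kotlinx.coroutines.suspendCancellableCoroutine

    // Suspend until the camera opens; disconnects and errors surface as
    // exceptions at the call site instead of inside a detached callback.
    @SuppressLint("MissingPermission")
    suspend fun openCamera(manager: CameraManager, cameraId: String, handler: Handler): CameraDevice =
        suspendCancellableCoroutine { cont ->
            manager.openCamera(cameraId, object : CameraDevice.StateCallback() {
                override fun onOpened(camera: CameraDevice) = cont.resume(camera)
                override fun onDisconnected(camera: CameraDevice) {
                    if (cont.isActive) cont.resumeWithException(IOException("Camera $cameraId disconnected"))
                }
                override fun onError(camera: CameraDevice, error: Int) {
                    if (cont.isActive) cont.resumeWithException(IOException("Camera $cameraId error: $error"))
                }
            }, handler)
        }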
--- .../google/mlkit/md/camera/Camera2Source.kt | 144 ++++++++---------- 1 file changed, 67 insertions(+), 77 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt index 9c790651bf..58f8ea2ddf 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt @@ -36,10 +36,15 @@ import com.google.mlkit.md.R import com.google.mlkit.md.Utils import com.google.mlkit.md.settings.PreferenceUtils import com.google.mlkit.md.utils.OrientationLiveData -import com.google.mlkit.md.utils.computeExifOrientation +import kotlinx.coroutines.runBlocking +import kotlinx.coroutines.suspendCancellableCoroutine +import kotlinx.coroutines.sync.Mutex +import kotlinx.coroutines.sync.withLock import java.io.IOException -import java.lang.IllegalStateException import java.util.* +import kotlin.coroutines.resume +import kotlin.coroutines.resumeWithException +import kotlin.coroutines.suspendCoroutine import kotlin.math.abs /** @@ -145,8 +150,10 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { private val processingRunnable = FrameProcessingRunnable() private val processorLock = Object() + private val mutex = Mutex() private var frameProcessor: Frame2Processor? = null + /** * Opens the camera and starts sending preview frames to the underlying detector. The supplied * surface holder is used for the preview so frames can be displayed to the user. @@ -154,50 +161,34 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { * @param surfaceHolder the surface holder to use for the preview frames. * @throws Exception if the supplied surface holder could not be used as the preview display. */ - @Synchronized @Throws(Exception::class) - internal fun start(surfaceHolder: SurfaceHolder, callback: CameraStartCallback) { - if (camera != null) return - - createCamera(object : CameraCreateCallback{ - override fun onSuccess(cameraDevice: CameraDevice) { - camera = cameraDevice - previewSize = getPreviewAndPictureSize(characteristics).preview.also { previewSize -> - imageReader = ImageReader.newInstance(previewSize.width, previewSize.height, IMAGE_FORMAT, IMAGE_BUFFER_SIZE).also {imageReader -> - createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), object : CameraSessionCreateCallback{ - override fun onSuccess(cameraCaptureSession: CameraCaptureSession) { - session = cameraCaptureSession - captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply { - addTarget(surfaceHolder.surface) - addTarget(imageReader.surface) - startPreview( this, imageReader, cameraCaptureSession) - callback.onSuccess() - } - - } - - override fun onFailure(error: Exception?) 
{ - callback.onFailure(error) + internal fun start(surfaceHolder: SurfaceHolder) { + runBlocking { + mutex.withLock { + + if (camera != null) return@withLock + + camera = createCamera().also {cameraDevice -> + previewSize = getPreviewAndPictureSize(characteristics).preview.also { previewSize -> + imageReader = ImageReader.newInstance(previewSize.width, previewSize.height, IMAGE_FORMAT, IMAGE_BUFFER_SIZE).also { imageReader -> + session = createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), cameraHandler).also {cameraCaptureSession -> + captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply { + addTarget(surfaceHolder.surface) + addTarget(imageReader.surface) + startPreview( this, imageReader, cameraCaptureSession) } - }, cameraHandler) + } + } } - - + processingThread = Thread(processingRunnable).apply { + processingRunnable.setActive(true) + start() + } + relativeOrientation.observeForever(orientationObserver) } - } - override fun onFailure(error: Exception?) { - callback.onFailure(error) } - - }) - - processingThread = Thread(processingRunnable).apply { - processingRunnable.setActive(true) - start() } - - relativeOrientation.observeForever(orientationObserver) } /** @@ -250,38 +241,36 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { * Call [.release] instead to completely shut down this camera source and release the * resources of the underlying detector. */ - @Synchronized @Throws(Exception::class) internal fun stop() { - Log.d(TAG, "Stop is called") - processingRunnable.setActive(false) - processingThread?.let { - try { - // Waits for the thread to complete to ensure that we can't have multiple threads executing - // at the same time (i.e., which would happen if we called start too quickly after stop). - it.join() - } catch (e: InterruptedException) { - Log.e(TAG, "Frame processing thread interrupted on stop.") - } - processingThread = null - } + runBlocking { + mutex.withLock { + Log.d(TAG, "Stop is called") + processingRunnable.setActive(false) + processingThread?.let { + try { + // Waits for the thread to complete to ensure that we can't have multiple threads executing + // at the same time (i.e., which would happen if we called start too quickly after stop). + it.join() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing thread interrupted on stop.") + } + processingThread = null + } - // Remove the reference image reader buffer & orientation change observer, since it will no longer be in use. - imageReader?.let { - it.setOnImageAvailableListener(null, null) - imageReader = null - } - relativeOrientation.removeObserver(orientationObserver) + // Remove the reference image reader buffer & orientation change observer, since it will no longer be in use. + imageReader?.let { + it.setOnImageAvailableListener(null, null) + imageReader = null + } - /* session?.let { - it.stopRepeating() - it.close() - session = null - }*/ + relativeOrientation.removeObserver(orientationObserver) - camera?.let { - it.close() - camera = null + camera?.let { + it.close() + camera = null + } + } } } @@ -329,19 +318,19 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { * @throws Exception if camera cannot be found or preview cannot be processed. 
*/ @Throws(Exception::class) - private fun createCamera(callback: CameraCreateCallback) { + private suspend fun createCamera(): CameraDevice = suspendCancellableCoroutine {cont -> if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { - throw IOException("Camera permission not granted") + if (cont.isActive) cont.resumeWithException(IOException("Camera permission not granted")) } cameraManager.openCamera(cameraId, object : CameraDevice.StateCallback() { - override fun onOpened(camera: CameraDevice) { - callback.onSuccess(camera) - } + override fun onOpened(camera: CameraDevice) = cont.resume(camera) override fun onDisconnected(camera: CameraDevice) { - callback.onFailure(null) + val exec = IOException("Camera $cameraId has been disconnected") + Log.e(TAG, exec.message, exec) + if (cont.isActive) cont.resumeWithException(exec) } override fun onError(camera: CameraDevice, error: Int) { @@ -355,7 +344,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { } val exc = IOException("Camera $cameraId error: ($error) $msg") Log.e(TAG, exc.message, exc) - callback.onFailure(exc) + if(cont.isActive) cont.resumeWithException(exc) } }, cameraHandler) @@ -368,17 +357,18 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { * @throws Exception if session cannot be created. */ @Throws(Exception::class) - private fun createCaptureSession(device: CameraDevice, targets: List, callback: CameraSessionCreateCallback, handler: Handler? = null){ + private suspend fun createCaptureSession(device: CameraDevice, targets: List, handler: Handler? = null): CameraCaptureSession = suspendCoroutine{ cont -> + // Create a capture session using the predefined targets; this also involves defining the // session state callback to be notified of when the session is ready device.createCaptureSession(targets, object : CameraCaptureSession.StateCallback() { - override fun onConfigured(session: CameraCaptureSession) = callback.onSuccess(session) + override fun onConfigured(session: CameraCaptureSession) = cont.resume(session) override fun onConfigureFailed(session: CameraCaptureSession) { val exc = RuntimeException("Camera ${device.id} session configuration failed") Log.e(TAG, exc.message, exc) - callback.onFailure(exc) + cont.resumeWithException(exc) } }, handler) } @@ -389,7 +379,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { * @throws Exception if cannot find a suitable size. */ @Throws(Exception::class) - private fun getPreviewAndPictureSize(characteristics: CameraCharacteristics): CameraSizePair { + private fun getPreviewAndPictureSize(characteristics: CameraCharacteristics): CameraSizePair { // Gives priority to the preview size specified by the user if exists. val sizePair: CameraSizePair = PreferenceUtils.getUserSpecifiedPreviewSize(context) ?: run { From 7c0bd52a47056db1bf95949bfc248677b210c6f7 Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Wed, 16 Aug 2023 18:38:44 +0530 Subject: [PATCH 12/18] Annotate the start or stop camera preview functions to be executed in Main thread only to avoid any ambiguous behaviour and some code cleanup. 
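Note: the annotations below make the threading contract machine-checkable:
lint flags any call to a @MainThread function from a background thread,
replacing the callback/post() indirection this patch deletes. In miniature
(the function body is a placeholder):

    import androidx.annotation.MainThread

    // Callers must already be on the UI thread; lint enforces it.
    @MainThread
    fun startPreview() { /* start camera and request layout */ }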
--- .../mlkit/md/camera/Camera2SourcePreview.kt | 37 ++++++------------- 1 file changed, 12 insertions(+), 25 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt index 6becc8382f..66175afec2 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt @@ -23,6 +23,7 @@ import android.util.Size import android.view.SurfaceHolder import android.view.SurfaceView import android.widget.FrameLayout +import androidx.annotation.MainThread import com.google.mlkit.md.R import com.google.mlkit.md.Utils import kotlin.math.abs @@ -37,7 +38,6 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout( } private var graphicOverlay: GraphicOverlay? = null private var startRequested = false - private var startProcessing = false private var surfaceAvailable = false private var cameraSource: Camera2Source? = null private var cameraPreviewSize: Size? = null @@ -47,6 +47,7 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout( graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay) } + @MainThread @Throws(Exception::class) fun start(cameraSource: Camera2Source) { this.cameraSource = cameraSource @@ -54,6 +55,7 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout( startIfReady() } + @MainThread @Throws(Exception::class) fun stop() { cameraSource?.let { @@ -65,32 +67,17 @@ class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout( @Throws(Exception::class) private fun startIfReady() { - if (startRequested && surfaceAvailable && !startProcessing) { - startProcessing = true + if (startRequested && surfaceAvailable) { Log.d(TAG, "Starting camera") - cameraSource?.start(surfaceView.holder, object : CameraStartCallback{ - override fun onSuccess() { - post { - requestLayout() - graphicOverlay?.let { overlay -> - cameraSource?.let { - overlay.setCameraInfo(it) - } - overlay.clear() - } - startRequested = false - startProcessing = false - } - - } - - override fun onFailure(error: Exception?) { - startRequested = false - startProcessing = false + cameraSource?.apply { + start(surfaceView.holder) + requestLayout() + graphicOverlay?.let { + it.setCameraInfo(this) + it.clear() } - - }) - + } + startRequested = false } } From a9f81f3d6ee8fbe47fc7bea3bc8cb55221df5358 Mon Sep 17 00:00:00 2001 From: Eeshan Jamal Date: Wed, 23 Aug 2023 15:41:58 +0530 Subject: [PATCH 13/18] Code cleanup and class variables & functions documentation. 
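Note: besides the KDoc pass, the diff below swaps android.preference.PreferenceManager
(a deprecated framework class) for androidx.preference.PreferenceManager; both
expose getDefaultSharedPreferences(context), so call sites stay unchanged. The
field-documentation convention the patch applies, as an illustrative fragment
(names follow the pattern in the diff):

    import android.os.Handler
    import android.os.HandlerThread

    /** [HandlerThread] on which all camera operations run. */
    private val cameraThread = HandlerThread("CameraThread").apply { start() }

    /** [Handler] corresponding to [cameraThread]; camera callbacks land here. */
    private val cameraHandler = Handler(cameraThread.looper)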
--- .../google/mlkit/md/camera/Camera2Source.kt | 61 ++++++------------- .../google/mlkit/md/camera/Frame2Processor.kt | 5 +- .../mlkit/md/camera/Frame2ProcessorBase.kt | 2 +- .../mlkit/md/settings/PreferenceUtils.kt | 2 +- 4 files changed, 22 insertions(+), 48 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt index 58f8ea2ddf..af2731385c 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt @@ -58,24 +58,6 @@ import kotlin.math.abs * is unable to keep up with the rate of frames generated by the camera. */ -//TODO: Remove this interface once start using coroutine suspend functions -private interface CameraCreateCallback{ - fun onSuccess(cameraDevice: CameraDevice) - fun onFailure(error: Exception?) -} - -//TODO: Remove this interface once start using coroutine suspend functions -private interface CameraSessionCreateCallback{ - fun onSuccess(cameraCaptureSession: CameraCaptureSession) - fun onFailure(error: Exception?) -} - -//TODO: Remove this interface once start using coroutine suspend functions -interface CameraStartCallback{ - fun onSuccess() - fun onFailure(error: Exception?) -} - class Camera2Source(private val graphicOverlay: GraphicOverlay) { private val context: Context = graphicOverlay.context @@ -125,32 +107,33 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { /** [Handler] corresponding to [imageReaderThread] */ private val imageReaderHandler = Handler(imageReaderThread.looper) - /** Live data property for retrieving the current device orientation relative to the camera or listening to the changes in it */ + /** [OrientationLiveData] correspond to current device orientation relative to the [camera] or listening to the changes in it */ private val relativeOrientation: OrientationLiveData by lazy { OrientationLiveData(context, characteristics) } - /** Observer for listening the changes in the [relativeOrientation] live data property */ + /** [Observer] for listening the changes in the [relativeOrientation] */ private val orientationObserver = androidx.lifecycle.Observer { rotation -> Log.d(TAG, "Orientation changed: $rotation") } - /** Return the current exif orientation for processing image */ - //private var rotationDegrees:Int = 0 - - /** Returns the preview size that is currently in use by the underlying camera. */ + /** [Size] that is currently in use by the [camera] */ internal var previewSize: Size? = null private set - /** - * Dedicated thread and associated runnable for calling into the detector with frames, as the - * frames become available from the camera. - */ + /** [Thread] for detecting & processing [imageReader] frames */ private var processingThread: Thread? = null + + /** [FrameProcessingRunnable] associated with the [processingThread] */ private val processingRunnable = FrameProcessingRunnable() + /** [Object] to lock the [frameProcessor] operations */ private val processorLock = Object() + + /** [Mutex] to lock the CoroutineScope operations */ private val mutex = Mutex() + + /** [Frame2Processor] to process the frames received inside [processingRunnable] */ private var frameProcessor: Frame2Processor? 
= null @@ -297,18 +280,13 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { fun updateFlashMode(enabled: Boolean) { val flashAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) as Boolean if(flashAvailable){ - /*if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - cameraManager.setTorchMode(cameraId, enabled) - } - else{*/ - session?.let {session -> - captureRequest?.let { captureRequest -> - captureRequest.set(CaptureRequest.FLASH_MODE, - if (enabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF) - updatePreview(captureRequest, session) - } + session?.let {session -> + captureRequest?.let { captureRequest -> + captureRequest.set(CaptureRequest.FLASH_MODE, + if (enabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF) + updatePreview(captureRequest, session) } - /*}*/ + } } } @@ -352,7 +330,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { } /** - * Starts a [CameraCaptureSession] and returns the configured session as callback [CameraSessionCreateCallback] + * Starts a [CameraCaptureSession] and returns the configured session * * @throws Exception if session cannot be created. */ @@ -396,13 +374,11 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { sizePair.preview.let { Log.v(TAG, "Camera preview size: $it") - //parameters.setPreviewSize(it.width, it.height) PreferenceUtils.saveStringPreference(context, R.string.pref_key_rear_camera_preview_size, it.toString()) } sizePair.picture?.let { pictureSize -> Log.v(TAG, "Camera picture size: $pictureSize") - //parameters.setPictureSize(pictureSize.width, pictureSize.height) PreferenceUtils.saveStringPreference( context, R.string.pref_key_rear_camera_picture_size, pictureSize.toString() ) @@ -511,7 +487,6 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { try { synchronized(processorLock) { data?.let { - //Log.d(TAG, "Processing Next Frame ${it.width} x ${it.height}") if(frameProcessor?.process(it, graphicOverlay) == true){ //Do nothing as frame processor accepted the image for processing // and it will close the image once the detection gets completed on it diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt index c44f295cdf..555f5934f2 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt @@ -16,14 +16,13 @@ package com.google.mlkit.md.camera -import android.media.Image import com.google.android.odml.image.MlImage -import java.nio.ByteBuffer /** An interface to process the input camera frame and perform detection on it. */ interface Frame2Processor { - /** Processes the input frame with the underlying detector. */ + /** Processes the input frame with the underlying detector. + * @return true if holding [MlImage] for processing otherwise return false */ fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean /** Stops the underlying detector and release resources. 
 */
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt
index a6b894288c..582c691106 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt
@@ -59,7 +59,7 @@ abstract class Frame2ProcessorBase<T> : Frame2Processor {
                 //Close the processing frame
                 frame.close()
             }
-            return true
+            true
         }
     }
 
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt
index f1dd7e7788..883c94ea02 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/PreferenceUtils.kt
@@ -18,7 +18,7 @@ package com.google.mlkit.md.settings
 
 import android.content.Context
 import android.graphics.RectF
-import android.preference.PreferenceManager
+import androidx.preference.PreferenceManager
 import android.util.Size
 import androidx.annotation.StringRes
 import com.google.mlkit.md.R

From 313681c20ff683c8afd7b924bec02dc28d4d0043 Mon Sep 17 00:00:00 2001
From: Eeshan Jamal
Date: Fri, 14 Feb 2025 21:27:37 +0530
Subject: [PATCH 14/18] - Introduced a CameraSource factory pattern to support
 both the Camera and Camera2 API sources, because the older Camera API source
 is still required on devices whose hardware does not support the Camera2 API.
 - Merged the additional code required for the Camera2 API into the original
 codebase (e.g. Camera2SourcePreview into CameraSourcePreview).
 - Added a preference for picture size over preview size in GraphicOverlay for
 Camera2 API support.
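Note: CameraSourceFactory.kt is created by this patch, but its body does not appear in this excerpt. A plausible minimal sketch of such a factory follows, assuming the selection criterion is the Camera2 hardware support level reported by the device (LEGACY devices run Camera2 through a compatibility shim, which is why the old Camera API is kept as a fallback); the patch's actual criterion may differ. It reuses the Camera2APISource.getCameraCharacteristics helper added below.

import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraMetadata

object CameraSourceFactory {

    /** Returns a Camera2-backed source where the hardware supports it, else the legacy Camera source. */
    fun createCameraSource(graphicOverlay: GraphicOverlay): CameraSource {
        val hardwareLevel = try {
            Camera2APISource.getCameraCharacteristics(graphicOverlay.context)
                .get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
        } catch (e: Exception) {
            null // No back-facing Camera2 device could be queried; fall back to the legacy API.
        }
        return if (hardwareLevel == null ||
            hardwareLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
        ) {
            CameraAPISource(graphicOverlay)
        } else {
            Camera2APISource(graphicOverlay)
        }
    }
}

Keeping the decision in one place lets the activities below stay agnostic: they only hold a CameraSource and call setFrameProcessor/setFlashStatus on it.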
--- .../md/CustomModelObjectDetectionActivity.kt | 7 +- .../mlkit/md/LiveBarcodeScanningActivity.kt | 19 +- .../mlkit/md/LiveObjectDetectionActivity.kt | 8 +- .../main/java/com/google/mlkit/md/Utils.kt | 60 +- .../md/barcodedetection/Barcode2Processor.kt | 122 ---- .../md/barcodedetection/BarcodeProcessor.kt | 9 +- .../{Camera2Source.kt => Camera2APISource.kt} | 266 +++++---- .../mlkit/md/camera/Camera2SourcePreview.kt | 153 ----- .../google/mlkit/md/camera/CameraAPISource.kt | 533 ++++++++++++++++++ .../google/mlkit/md/camera/CameraSource.kt | 505 +---------------- .../mlkit/md/camera/CameraSourceFactory.kt | 39 ++ .../mlkit/md/camera/CameraSourcePreview.kt | 78 +-- .../google/mlkit/md/camera/Frame2Processor.kt | 30 - .../mlkit/md/camera/Frame2ProcessorBase.kt | 84 --- .../google/mlkit/md/camera/FrameProcessor.kt | 6 + .../mlkit/md/camera/FrameProcessorBase.kt | 53 +- .../google/mlkit/md/camera/GraphicOverlay.kt | 25 +- .../objectdetection/MultiObjectProcessor.kt | 11 +- .../ProminentObjectProcessor.kt | 10 +- .../main/res/layout/activity_live_barcode.xml | 4 +- 20 files changed, 891 insertions(+), 1131 deletions(-) delete mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt rename android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/{Camera2Source.kt => Camera2APISource.kt} (87%) delete mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt delete mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt delete mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt index e78a648183..e29fdd4d8b 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt @@ -41,6 +41,7 @@ import com.google.mlkit.md.camera.GraphicOverlay import com.google.mlkit.md.camera.WorkflowModel import com.google.mlkit.md.camera.WorkflowModel.WorkflowState import com.google.mlkit.md.camera.CameraSource +import com.google.mlkit.md.camera.CameraSourceFactory import com.google.mlkit.md.camera.CameraSourcePreview import com.google.mlkit.md.objectdetection.MultiObjectProcessor import com.google.mlkit.md.objectdetection.ProminentObjectProcessor @@ -81,7 +82,7 @@ class CustomModelObjectDetectionActivity : AppCompatActivity(), OnClickListener preview = findViewById(R.id.camera_preview) graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay).apply { setOnClickListener(this@CustomModelObjectDetectionActivity) - cameraSource = CameraSource(this) + cameraSource = CameraSourceFactory.createCameraSource(this) } promptChip = findViewById(R.id.bottom_prompt_chip) promptChipAnimator = @@ -160,10 +161,10 @@ class CustomModelObjectDetectionActivity : AppCompatActivity(), OnClickListener R.id.flash_button -> { if (flashButton?.isSelected == true) { flashButton?.isSelected = false - 
cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_OFF) + cameraSource?.setFlashStatus(false) } else { flashButton?.isSelected = true - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_TORCH) + cameraSource?.setFlashStatus(true) } } R.id.settings_button -> { diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt index 82aa1712e4..a3ca2ef3d3 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt @@ -28,11 +28,12 @@ import androidx.lifecycle.Observer import androidx.lifecycle.ViewModelProviders import com.google.android.material.chip.Chip import com.google.common.base.Objects -import com.google.mlkit.md.barcodedetection.Barcode2Processor +import com.google.mlkit.md.barcodedetection.BarcodeProcessor import com.google.mlkit.md.barcodedetection.BarcodeField import com.google.mlkit.md.barcodedetection.BarcodeResultFragment -import com.google.mlkit.md.camera.Camera2Source -import com.google.mlkit.md.camera.Camera2SourcePreview +import com.google.mlkit.md.camera.CameraSource +import com.google.mlkit.md.camera.CameraSourcePreview +import com.google.mlkit.md.camera.CameraSourceFactory import com.google.mlkit.md.camera.GraphicOverlay import com.google.mlkit.md.camera.WorkflowModel import com.google.mlkit.md.camera.WorkflowModel.WorkflowState @@ -42,8 +43,8 @@ import java.io.IOException /** Demonstrates the barcode scanning workflow using camera preview. */ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { - private var cameraSource: Camera2Source? = null - private var preview: Camera2SourcePreview? = null + private var cameraSource: CameraSource? = null + private var preview: CameraSourcePreview? = null private var graphicOverlay: GraphicOverlay? = null private var settingsButton: View? = null private var flashButton: View? 
= null @@ -59,7 +60,7 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { preview = findViewById(R.id.camera_preview) graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay).apply { setOnClickListener(this@LiveBarcodeScanningActivity) - cameraSource = Camera2Source(this) + cameraSource = CameraSourceFactory.createCameraSource(this) } promptChip = findViewById(R.id.bottom_prompt_chip) @@ -85,7 +86,7 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { workflowModel?.markCameraFrozen() settingsButton?.isEnabled = true currentWorkflowState = WorkflowState.NOT_STARTED - cameraSource?.setFrameProcessor(Barcode2Processor(graphicOverlay!!, workflowModel!!)) + cameraSource?.setFrameProcessor(BarcodeProcessor(graphicOverlay!!, workflowModel!!)) workflowModel?.setWorkflowState(WorkflowState.DETECTING) } @@ -113,10 +114,10 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { flashButton?.let { if (it.isSelected) { it.isSelected = false - cameraSource?.updateFlashMode(false) + cameraSource?.setFlashStatus(false) } else { it.isSelected = true - cameraSource!!.updateFlashMode(true) + cameraSource!!.setFlashStatus(true) } } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt index 453f144883..bd2804e73f 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt @@ -28,6 +28,7 @@ import android.view.View import android.view.View.OnClickListener import android.widget.ProgressBar import android.widget.TextView +import android.widget.Toast import androidx.appcompat.app.AppCompatActivity import androidx.lifecycle.Observer import androidx.lifecycle.ViewModelProviders @@ -42,6 +43,7 @@ import com.google.mlkit.md.camera.GraphicOverlay import com.google.mlkit.md.camera.WorkflowModel import com.google.mlkit.md.camera.WorkflowModel.WorkflowState import com.google.mlkit.md.camera.CameraSource +import com.google.mlkit.md.camera.CameraSourceFactory import com.google.mlkit.md.camera.CameraSourcePreview import com.google.mlkit.md.objectdetection.MultiObjectProcessor import com.google.mlkit.md.objectdetection.ProminentObjectProcessor @@ -85,7 +87,7 @@ class LiveObjectDetectionActivity : AppCompatActivity(), OnClickListener { preview = findViewById(R.id.camera_preview) graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay).apply { setOnClickListener(this@LiveObjectDetectionActivity) - cameraSource = CameraSource(this) + cameraSource = CameraSourceFactory.createCameraSource(this) } promptChip = findViewById(R.id.bottom_prompt_chip) promptChipAnimator = @@ -160,10 +162,10 @@ class LiveObjectDetectionActivity : AppCompatActivity(), OnClickListener { R.id.flash_button -> { if (flashButton?.isSelected == true) { flashButton?.isSelected = false - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_OFF) + cameraSource?.setFlashStatus(false) } else { flashButton?.isSelected = true - cameraSource?.updateFlashMode(Camera.Parameters.FLASH_MODE_TORCH) + cameraSource?.setFlashStatus(true) } } R.id.settings_button -> { diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt index 6ddb3038c9..1cd831b7dd 100644 --- 
a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt @@ -32,24 +32,19 @@ import android.graphics.PorterDuffXfermode import android.graphics.Rect import android.graphics.RectF import android.graphics.YuvImage -import android.hardware.Camera -import android.hardware.camera2.CameraCharacteristics import android.media.Image import android.net.Uri import android.util.Log -import android.view.Surface -import android.view.SurfaceHolder import androidx.core.app.ActivityCompat import androidx.core.content.ContextCompat.checkSelfPermission import androidx.exifinterface.media.ExifInterface -import com.google.mlkit.md.camera.Camera2Source import com.google.mlkit.md.camera.CameraSizePair +import com.google.mlkit.md.camera.CameraSource import com.google.mlkit.vision.common.InputImage import java.io.ByteArrayOutputStream import java.io.IOException import java.io.InputStream import java.nio.ByteBuffer -import java.util.ArrayList import kotlin.math.abs /** Utility class to provide helper methods. */ @@ -96,53 +91,6 @@ object Utils { fun isPortraitMode(context: Context): Boolean = context.resources.configuration.orientation == Configuration.ORIENTATION_PORTRAIT - /** - * Use [generateValidPreviewSizeList] instead. - * - * Generates a list of acceptable preview sizes. Preview sizes are not acceptable if there is not - * a corresponding picture size of the same aspect ratio. If there is a corresponding picture size - * of the same aspect ratio, the picture size is paired up with the preview size. - * - * - * This is necessary because even if we don't use still pictures, the still picture size must - * be set to a size that is the same aspect ratio as the preview size we choose. Otherwise, the - * preview images may be distorted on some devices. - */ - @Deprecated("This method is deprecated.") - fun generateValidPreviewSizeList(camera: Camera): List { - val parameters = camera.parameters - val supportedPreviewSizes = parameters.supportedPreviewSizes - val supportedPictureSizes = parameters.supportedPictureSizes - val validPreviewSizes = ArrayList() - for (previewSize in supportedPreviewSizes) { - val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() - - // By looping through the picture sizes in order, we favor the higher resolutions. - // We choose the highest resolution in order to support taking the full resolution - // picture later. - for (pictureSize in supportedPictureSizes) { - val pictureAspectRatio = pictureSize.width.toFloat() / pictureSize.height.toFloat() - if (abs(previewAspectRatio - pictureAspectRatio) < ASPECT_RATIO_TOLERANCE) { - validPreviewSizes.add(CameraSizePair(previewSize, pictureSize)) - break - } - } - } - - // If there are no picture sizes with the same aspect ratio as any preview sizes, allow all of - // the preview sizes and hope that the camera can handle it. Probably unlikely, but we still - // account for it. - if (validPreviewSizes.isEmpty()) { - Log.w(TAG, "No preview sizes have a corresponding same-aspect-ratio picture size.") - for (previewSize in supportedPreviewSizes) { - // The null picture size will let us know that we shouldn't set a picture size. - validPreviewSizes.add(CameraSizePair(previewSize, null)) - } - } - - return validPreviewSizes - } - /** * Generates a list of acceptable preview sizes. Preview sizes are not acceptable if there is not * a corresponding picture size of the same aspect ratio. 
If there is a corresponding picture size @@ -153,10 +101,10 @@ object Utils { * be set to a size that is the same aspect ratio as the preview size we choose. Otherwise, the * preview images may be distorted on some devices. */ - fun generateValidPreviewSizeList(characteristics: CameraCharacteristics): List { + fun generateValidPreviewSizeList(cameraSource: CameraSource): List { - val supportedPreviewSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(SurfaceHolder::class.java) - val supportedPictureSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(Camera2Source.IMAGE_FORMAT) + val supportedPreviewSizes = cameraSource.getSupportedPreviewSizes() + val supportedPictureSizes = cameraSource.getSupportedPictureSizes() val validPreviewSizes = ArrayList() for (previewSize in supportedPreviewSizes) { val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt deleted file mode 100644 index e502f6fc87..0000000000 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/Barcode2Processor.kt +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.mlkit.md.barcodedetection - -import android.animation.ValueAnimator -import android.util.Log -import androidx.annotation.MainThread -import com.google.android.gms.tasks.Task -import com.google.android.odml.image.MlImage -import com.google.mlkit.md.InputInfo -import com.google.mlkit.md.camera.* -import com.google.mlkit.md.camera.WorkflowModel.WorkflowState -import com.google.mlkit.md.settings.PreferenceUtils -import com.google.mlkit.vision.barcode.BarcodeScanning -import com.google.mlkit.vision.barcode.common.Barcode -import com.google.mlkit.vision.common.InputImage -import java.io.IOException - -/** A processor to run the barcode detector. */ -class Barcode2Processor(graphicOverlay: GraphicOverlay, private val workflowModel: WorkflowModel) : - Frame2ProcessorBase>() { - - private val scanner = BarcodeScanning.getClient() - private val cameraReticleAnimator: CameraReticleAnimator = CameraReticleAnimator(graphicOverlay) - - override fun detectInImage(image: MlImage): Task> = - scanner.process(image) - - @MainThread - override fun onSuccess( - inputInfo: InputInfo, - results: List, - graphicOverlay: GraphicOverlay - ) { - - if (!workflowModel.isCameraLive) return - - //Log.d(TAG, "Barcode result size: ${results.size}") - - // Picks the barcode, if exists, that covers the center of graphic overlay. 
- - val barcodeInCenter = results.firstOrNull { barcode -> - val boundingBox = barcode.boundingBox ?: return@firstOrNull false - val box = graphicOverlay.translateRect(boundingBox) - box.contains(graphicOverlay.width / 2f, graphicOverlay.height / 2f) - } - - graphicOverlay.clear() - if (barcodeInCenter == null) { - cameraReticleAnimator.start() - graphicOverlay.add(BarcodeReticleGraphic(graphicOverlay, cameraReticleAnimator)) - workflowModel.setWorkflowState(WorkflowState.DETECTING) - } else { - cameraReticleAnimator.cancel() - val sizeProgress = PreferenceUtils.getProgressToMeetBarcodeSizeRequirement(graphicOverlay, barcodeInCenter) - if (sizeProgress < 1) { - // Barcode in the camera view is too small, so prompt user to move camera closer. - graphicOverlay.add(BarcodeConfirmingGraphic(graphicOverlay, barcodeInCenter)) - workflowModel.setWorkflowState(WorkflowState.CONFIRMING) - } else { - // Barcode size in the camera view is sufficient. - if (PreferenceUtils.shouldDelayLoadingBarcodeResult(graphicOverlay.context)) { - val loadingAnimator = createLoadingAnimator(graphicOverlay, barcodeInCenter) - loadingAnimator.start() - graphicOverlay.add(BarcodeLoadingGraphic(graphicOverlay, loadingAnimator)) - workflowModel.setWorkflowState(WorkflowState.SEARCHING) - } else { - workflowModel.setWorkflowState(WorkflowState.DETECTED) - workflowModel.detectedBarcode.setValue(barcodeInCenter) - } - } - } - graphicOverlay.invalidate() - } - - private fun createLoadingAnimator(graphicOverlay: GraphicOverlay, barcode: Barcode): ValueAnimator { - val endProgress = 1.1f - return ValueAnimator.ofFloat(0f, endProgress).apply { - duration = 2000 - addUpdateListener { - if ((animatedValue as Float).compareTo(endProgress) >= 0) { - graphicOverlay.clear() - workflowModel.setWorkflowState(WorkflowState.SEARCHED) - workflowModel.detectedBarcode.setValue(barcode) - } else { - graphicOverlay.invalidate() - } - } - } - } - - override fun onFailure(e: Exception?) 
{
-        Log.e(TAG, "Barcode detection failed!", e)
-    }
-
-    override fun stop() {
-        super.stop()
-        try {
-            scanner.close()
-        } catch (e: IOException) {
-            Log.e(TAG, "Failed to close barcode detector!", e)
-        }
-    }
-
-    companion object {
-        private const val TAG = "BarcodeProcessor"
-    }
-}
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/BarcodeProcessor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/BarcodeProcessor.kt
index 51bd006f51..63b815cb05 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/BarcodeProcessor.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/barcodedetection/BarcodeProcessor.kt
@@ -20,6 +20,7 @@ import android.animation.ValueAnimator
 import android.util.Log
 import androidx.annotation.MainThread
 import com.google.android.gms.tasks.Task
+import com.google.android.odml.image.MlImage
 import com.google.mlkit.md.InputInfo
 import com.google.mlkit.md.camera.CameraReticleAnimator
 import com.google.mlkit.md.camera.GraphicOverlay
@@ -39,8 +40,10 @@ class BarcodeProcessor(graphicOverlay: GraphicOverlay, private val workflowModel
     private val scanner = BarcodeScanning.getClient()
     private val cameraReticleAnimator: CameraReticleAnimator = CameraReticleAnimator(graphicOverlay)
 
-    override fun detectInImage(image: InputImage): Task<List<Barcode>> =
-        scanner.process(image)
+    override fun detectInImage(image: MlImage): Task<List<Barcode>> = scanner.process(image)
+
+    @Deprecated("Keeping it only to support Camera API frame processing")
+    override fun detectInImage(image: InputImage): Task<List<Barcode>> = scanner.process(image)
 
     @MainThread
     override fun onSuccess(
@@ -105,7 +108,7 @@ class BarcodeProcessor(graphicOverlay: GraphicOverlay, private val workflowModel
         }
     }
 
-    override fun onFailure(e: Exception) {
+    override fun onFailure(e: Exception?) {
         Log.e(TAG, "Barcode detection failed!", e)
     }
 
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt
similarity index 87%
rename from android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt
rename to android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt
index af2731385c..9fde18e60c 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2Source.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt
@@ -58,29 +58,23 @@ import kotlin.math.abs
  * is unable to keep up with the rate of frames generated by the camera.
 */
-class Camera2Source(private val graphicOverlay: GraphicOverlay) {
+class Camera2APISource(private val graphicOverlay: GraphicOverlay): CameraSource() {
 
     private val context: Context = graphicOverlay.context
 
     /** Detects, characterizes, and connects to a CameraDevice (used for all camera operations) */
     private val cameraManager: CameraManager by lazy {
-        context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
+        getCameraManager(context)
     }
 
     /** [cameraId] corresponding to the back-facing camera */
     private val cameraId: String by lazy {
-        cameraManager.cameraIdList.forEach {
-            val characteristics = cameraManager.getCameraCharacteristics(it)
-            if (characteristics.get(CameraCharacteristics.LENS_FACING) == CAMERA_FACING_BACK){
-                return@lazy it
-            }
-        }
-        throw IOException("No Camera found matching the back facing lens $CAMERA_FACING_BACK")
+        getCameraId(context)
     }
 
     /** [CameraCharacteristics] corresponding to the provided Camera ID */
     private val characteristics: CameraCharacteristics by lazy {
-        cameraManager.getCameraCharacteristics(cameraId)
+        getCameraCharacteristics(context)
    }
 
     /** The [CameraDevice] that will be used for preview */
@@ -118,8 +112,10 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     }
 
     /** [Size] that is currently in use by the [camera] */
-    internal var previewSize: Size? = null
-        private set
+    private var previewSize: Size? = null
+
+    /** [Size] that is used by the [imageReader], preferred over [previewSize] if it exists */
+    private var pictureSize: Size? = null
 
     /** [Thread] for detecting & processing [imageReader] frames */
     private var processingThread: Thread? = null
@@ -133,46 +129,8 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) {
     /** [Mutex] to lock the CoroutineScope operations */
     private val mutex = Mutex()
 
-    /** [Frame2Processor] to process the frames received inside [processingRunnable] */
-    private var frameProcessor: Frame2Processor? = null
-
-
-    /**
-     * Opens the camera and starts sending preview frames to the underlying detector. The supplied
-     * surface holder is used for the preview so frames can be displayed to the user.
-     *
-     * @param surfaceHolder the surface holder to use for the preview frames.
-     * @throws Exception if the supplied surface holder could not be used as the preview display.
-     */
-    @Throws(Exception::class)
-    internal fun start(surfaceHolder: SurfaceHolder) {
-        runBlocking {
-            mutex.withLock {
-
-                if (camera != null) return@withLock
-
-                camera = createCamera().also {cameraDevice ->
-                    previewSize = getPreviewAndPictureSize(characteristics).preview.also { previewSize ->
-                        imageReader = ImageReader.newInstance(previewSize.width, previewSize.height, IMAGE_FORMAT, IMAGE_BUFFER_SIZE).also { imageReader ->
-                            session = createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), cameraHandler).also {cameraCaptureSession ->
-                                captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
-                                    addTarget(surfaceHolder.surface)
-                                    addTarget(imageReader.surface)
-                                    startPreview( this, imageReader, cameraCaptureSession)
-                                }
-                            }
-                        }
-                    }
-                    processingThread = Thread(processingRunnable).apply {
-                        processingRunnable.setActive(true)
-                        start()
-                    }
-                    relativeOrientation.observeForever(orientationObserver)
-                }
-
-            }
-        }
-    }
+    /** [FrameProcessor] to process the frames received inside [processingRunnable] */
+    private var frameProcessor: FrameProcessor?
= null /** * Start the camera preview on the provided surface and process images through image reader buffer @@ -192,9 +150,15 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { //Setup listener for receiving the preview frames for processing imageReader.setOnImageAvailableListener({ - it.acquireNextImage()?.let {image -> - val rotation = relativeOrientation.value ?: 0 - processingRunnable.setNextFrame(image, rotation) + try { + it.acquireNextImage()?.let {image -> + val rotation = relativeOrientation.value ?: 0 + processingRunnable.setNextFrame(image, rotation) + } + } + catch (e: IllegalStateException){ + e.printStackTrace() + Log.e(TAG, "${e.message} At acquire next image") } }, imageReaderHandler) @@ -214,70 +178,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { session.setRepeatingRequest(captureRequest.build(), null, cameraHandler) } - /** - * Closes the camera and stops sending frames to the underlying frame detector. - * - * - * This camera source may be restarted again by calling [.start]. - * - * - * Call [.release] instead to completely shut down this camera source and release the - * resources of the underlying detector. - */ - @Throws(Exception::class) - internal fun stop() { - runBlocking { - mutex.withLock { - Log.d(TAG, "Stop is called") - processingRunnable.setActive(false) - processingThread?.let { - try { - // Waits for the thread to complete to ensure that we can't have multiple threads executing - // at the same time (i.e., which would happen if we called start too quickly after stop). - it.join() - } catch (e: InterruptedException) { - Log.e(TAG, "Frame processing thread interrupted on stop.") - } - processingThread = null - } - - // Remove the reference image reader buffer & orientation change observer, since it will no longer be in use. - imageReader?.let { - it.setOnImageAvailableListener(null, null) - imageReader = null - } - - relativeOrientation.removeObserver(orientationObserver) - - camera?.let { - it.close() - camera = null - } - } - } - - } - - /** Stops the camera and releases the resources of the camera and underlying detector. */ - fun release() { - graphicOverlay.clear() - synchronized(processorLock) { - stop() - frameProcessor?.stop() - cameraThread.quitSafely() - imageReaderThread.quitSafely() - } - } - - fun setFrameProcessor(processor: Frame2Processor) { - graphicOverlay.clear() - synchronized(processorLock) { - frameProcessor?.stop() - frameProcessor = processor - } - } - - fun updateFlashMode(enabled: Boolean) { + private fun updateFlashMode(enabled: Boolean) { val flashAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) as Boolean if(flashAvailable){ session?.let {session -> @@ -357,7 +258,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { * @throws Exception if cannot find a suitable size. */ @Throws(Exception::class) - private fun getPreviewAndPictureSize(characteristics: CameraCharacteristics): CameraSizePair { + private fun getPreviewAndPictureSize(cameraSource: CameraSource): CameraSizePair { // Gives priority to the preview size specified by the user if exists. 
val sizePair: CameraSizePair = PreferenceUtils.getUserSpecifiedPreviewSize(context) ?: run { @@ -369,7 +270,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { } else { graphicOverlay.width.toFloat() / graphicOverlay.height } - selectSizePair(characteristics, displayAspectRatioInLandscape) + selectSizePair(cameraSource, displayAspectRatioInLandscape) } ?: throw IOException("Could not find suitable preview size.") sizePair.preview.let { @@ -386,6 +287,106 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { return sizePair } + //Camera source overrides + + override fun getSupportedPreviewSizes(): Array = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(SurfaceHolder::class.java) + + override fun getSupportedPictureSizes(): Array = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!.getOutputSizes(IMAGE_FORMAT) + + override fun setFrameProcessor(processor: FrameProcessor) { + graphicOverlay.clear() + synchronized(processorLock) { + frameProcessor?.stop() + frameProcessor = processor + } + } + + override fun setFlashStatus(status: Boolean){ + if (status){ + updateFlashMode(true) + } + else{ + updateFlashMode(false) + } + } + + override fun getSelectedPreviewSize() = previewSize + + override fun getSelectedPictureSize() = pictureSize + + override fun start(surfaceHolder: SurfaceHolder) { + runBlocking { + mutex.withLock { + + if (camera != null) return@withLock + + camera = createCamera().also {cameraDevice -> + getPreviewAndPictureSize(this@Camera2APISource).also { sizePair -> + previewSize = sizePair.preview + pictureSize = sizePair.picture + val imageSize = sizePair.picture ?: sizePair.preview + imageReader = ImageReader.newInstance(imageSize.width, imageSize.height, IMAGE_FORMAT, IMAGE_BUFFER_SIZE).also { imageReader -> + session = createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), cameraHandler).also {cameraCaptureSession -> + captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply { + addTarget(surfaceHolder.surface) + addTarget(imageReader.surface) + startPreview( this, imageReader, cameraCaptureSession) + } + } + } + } + processingThread = Thread(processingRunnable).apply { + processingRunnable.setActive(true) + start() + } + relativeOrientation.observeForever(orientationObserver) + } + + } + } + } + + override fun stop() { + runBlocking { + mutex.withLock { + Log.d(TAG, "Stop is called") + processingRunnable.setActive(false) + processingThread?.let { + try { + // Waits for the thread to complete to ensure that we can't have multiple threads executing + // at the same time (i.e., which would happen if we called start too quickly after stop). + it.join() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing thread interrupted on stop.") + } + processingThread = null + } + + // Remove the reference image reader buffer & orientation change observer, since it will no longer be in use. 
+ imageReader?.let { + it.setOnImageAvailableListener(null, null) + imageReader = null + } + + relativeOrientation.removeObserver(orientationObserver) + + camera?.let { + it.close() + camera = null + } + } + } + } + + override fun release() { + graphicOverlay.clear() + synchronized(processorLock) { + stop() + frameProcessor?.stop() + cameraThread.quitSafely() + imageReaderThread.quitSafely() + } + } /** * This runnable controls access to the underlying receiver, calling it to process frames when @@ -513,7 +514,7 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { const val CAMERA_FACING_BACK = CameraCharacteristics.LENS_FACING_BACK const val IMAGE_FORMAT = ImageFormat.YUV_420_888 - private const val TAG = "CameraSource" + private const val TAG = "Camera2APISource" /** Maximum number of images that will be held in the reader's buffer */ private const val IMAGE_BUFFER_SIZE: Int = 3 @@ -523,6 +524,21 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 640 private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360 + private fun getCameraManager(context: Context) = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager + + private fun getCameraId(context: Context): String { + val cameraManager = getCameraManager(context) + cameraManager.cameraIdList.forEach { + val characteristics = cameraManager.getCameraCharacteristics(it) + if (characteristics.get(CameraCharacteristics.LENS_FACING) == CAMERA_FACING_BACK){ + return it + } + } + throw IOException("No Camera found matching the back facing lens $CAMERA_FACING_BACK") + } + + fun getCameraCharacteristics(context: Context) = getCameraManager(context).getCameraCharacteristics(getCameraId(context)) + /** * Selects the most suitable preview and picture size, given the display aspect ratio in landscape * mode. @@ -542,11 +558,11 @@ class Camera2Source(private val graphicOverlay: GraphicOverlay) { * ratio. On some hardware, if you would only set the preview size, you will get a distorted * image. * - * @param characteristics the selected camera characteristics to select a preview size from + * @param cameraSource the selected camera source to select a preview size from * @return the selected preview and picture size pair */ - private fun selectSizePair(characteristics: CameraCharacteristics, displayAspectRatioInLandscape: Float): CameraSizePair? { - val validPreviewSizes = Utils.generateValidPreviewSizeList(characteristics) + private fun selectSizePair(cameraSource: CameraSource, displayAspectRatioInLandscape: Float): CameraSizePair? { + val validPreviewSizes = Utils.generateValidPreviewSizeList(cameraSource) var selectedPair: CameraSizePair? = null // Picks the preview size that has closest aspect ratio to display view. diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt deleted file mode 100644 index 66175afec2..0000000000 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2SourcePreview.kt +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.mlkit.md.camera - -import android.content.Context -import android.util.AttributeSet -import android.util.Log -import android.util.Size -import android.view.SurfaceHolder -import android.view.SurfaceView -import android.widget.FrameLayout -import androidx.annotation.MainThread -import com.google.mlkit.md.R -import com.google.mlkit.md.Utils -import kotlin.math.abs -import kotlin.math.roundToInt - -/** Preview the camera image in the screen. */ -class Camera2SourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) { - - private val surfaceView: SurfaceView = SurfaceView(context).apply { - holder.addCallback(SurfaceCallback()) - addView(this) - } - private var graphicOverlay: GraphicOverlay? = null - private var startRequested = false - private var surfaceAvailable = false - private var cameraSource: Camera2Source? = null - private var cameraPreviewSize: Size? = null - - override fun onFinishInflate() { - super.onFinishInflate() - graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay) - } - - @MainThread - @Throws(Exception::class) - fun start(cameraSource: Camera2Source) { - this.cameraSource = cameraSource - startRequested = true - startIfReady() - } - - @MainThread - @Throws(Exception::class) - fun stop() { - cameraSource?.let { - it.stop() - cameraSource = null - startRequested = false - } - } - - @Throws(Exception::class) - private fun startIfReady() { - if (startRequested && surfaceAvailable) { - Log.d(TAG, "Starting camera") - cameraSource?.apply { - start(surfaceView.holder) - requestLayout() - graphicOverlay?.let { - it.setCameraInfo(this) - it.clear() - } - } - startRequested = false - } - } - - override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) { - val layoutWidth = right - left - val layoutHeight = bottom - top - - cameraSource?.previewSize?.let { cameraPreviewSize = it } - - val previewSizeRatio = cameraPreviewSize?.let { size -> - if (Utils.isPortraitMode(context)) { - // Camera's natural orientation is landscape, so need to swap width and height. 
- size.height.toFloat() / size.width - } else { - size.width.toFloat() / size.height - } - } ?: (layoutWidth.toFloat() / layoutHeight.toFloat()) - - //Calculate the new surface view size by scaling the layout width/height based on aspect ratio - val newLayoutWidth: Int - val newLayoutHeight: Int - if (width < height * previewSizeRatio) { - newLayoutHeight = height - newLayoutWidth = (height * previewSizeRatio).roundToInt() - } else { - newLayoutWidth = width - newLayoutHeight = (width / previewSizeRatio).roundToInt() - } - - //Apply the new width & height to surface view only in a way that it should center crop the camera preview - val excessWidthInHalf = abs(newLayoutWidth - layoutWidth) / 2 - val excessHeightInHalf = abs(newLayoutHeight - layoutHeight) / 2 - surfaceView.layout( - -excessWidthInHalf, -excessHeightInHalf, newLayoutWidth, newLayoutHeight - ) - - //Apply the actual layout width & height to rest of its child views - for (i in 0 until childCount) { - val childView = getChildAt(i) - if (!childView.equals(surfaceView)){ - childView.layout(0, 0, layoutWidth, layoutHeight) - } - } - - try { - startIfReady() - } catch (e: Exception) { - Log.e(TAG, "Could not start camera source.", e) - } - } - - private inner class SurfaceCallback : SurfaceHolder.Callback { - override fun surfaceCreated(surface: SurfaceHolder) { - surfaceAvailable = true - try { - startIfReady() - } catch (e: Exception) { - Log.e(TAG, "Could not start camera source.", e) - } - } - - override fun surfaceDestroyed(surface: SurfaceHolder) { - surfaceAvailable = false - } - - override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) { - } - } - - companion object { - private const val TAG = "CameraSourcePreview" - } -} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt new file mode 100644 index 0000000000..a29d1c28c8 --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt @@ -0,0 +1,533 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.mlkit.md.camera + +import android.content.Context +import android.graphics.ImageFormat +import android.hardware.Camera +import android.hardware.Camera.CameraInfo +import android.hardware.Camera.Parameters +import android.util.Log +import android.util.Size +import android.view.Surface +import android.view.SurfaceHolder +import android.view.WindowManager +import com.google.mlkit.md.R +import com.google.mlkit.md.Utils +import com.google.mlkit.md.settings.PreferenceUtils +import java.io.IOException +import java.nio.ByteBuffer +import java.util.IdentityHashMap +import kotlin.math.abs +import kotlin.math.ceil + +/** + * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics). 
This + * receives preview frames from the camera at a specified rate, sends those frames to detector as + * fast as it is able to process. + * + * + * This camera source makes a best effort to manage processing on preview frames as fast as + * possible, while at the same time minimizing lag. As such, frames may be dropped if the detector + * is unable to keep up with the rate of frames generated by the camera. + */ +@Suppress("DEPRECATION") +class CameraAPISource(private val graphicOverlay: GraphicOverlay) : CameraSource() { + + private var camera: Camera? = null + private var parameters: Parameters? = null + private var rotationDegrees: Int = 0 + + /** Returns the preview size that is currently in use by the underlying camera. */ + internal var previewSize: Size? = null + private set + + /** + * Dedicated thread and associated runnable for calling into the detector with frames, as the + * frames become available from the camera. + */ + private var processingThread: Thread? = null + private val processingRunnable = FrameProcessingRunnable() + + private val processorLock = Object() + private var frameProcessor: FrameProcessor? = null + + /** + * Map to convert between a byte array, received from the camera, and its associated byte buffer. + * We use byte buffers internally because this is a more efficient way to call into native code + * later (avoids a potential copy). + * + * + * **Note:** uses IdentityHashMap here instead of HashMap because the behavior of an array's + * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces + * identity ('==') check on the keys. + */ + private val bytesToByteBuffer = IdentityHashMap() + private val context: Context = graphicOverlay.context + + private fun updateFlashMode(flashMode: String) { + val parameters = camera?.parameters + parameters?.flashMode = flashMode + camera?.parameters = parameters + } + + /** + * Opens the camera and applies the user settings. + * + * @throws IOException if camera cannot be found or preview cannot be processed. + */ + @Throws(IOException::class) + private fun createCamera(): Camera { + val camera = Camera.open() ?: throw IOException("There is no back-facing camera.") + val parameters = camera.parameters.also { + this.parameters = it + } + setPreviewAndPictureSize(this, parameters) + setRotation(camera, parameters) + + val previewFpsRange = selectPreviewFpsRange(camera) + ?: throw IOException("Could not find suitable preview frames per second range.") + parameters.setPreviewFpsRange( + previewFpsRange[Parameters.PREVIEW_FPS_MIN_INDEX], + previewFpsRange[Parameters.PREVIEW_FPS_MAX_INDEX] + ) + + parameters.previewFormat = IMAGE_FORMAT + + if (parameters.supportedFocusModes.contains(Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { + parameters.focusMode = Parameters.FOCUS_MODE_CONTINUOUS_VIDEO + } else { + Log.i(TAG, "Camera auto focus is not supported on this device.") + } + + camera.parameters = parameters + + camera.setPreviewCallbackWithBuffer(processingRunnable::setNextFrame) + + // Four frame buffers are needed for working with the camera: + // + // one for the frame that is currently being executed upon in doing detection + // one for the next pending frame to process immediately upon completing detection + // two for the frames that the camera uses to populate future preview images + // + // Through trial and error it appears that two free buffers, in addition to the two buffers + // used in this code, are needed for the camera to work properly. 
Perhaps the camera has one + // thread for acquiring images, and another thread for calling into user code. If only three + // buffers are used, then the camera will spew thousands of warning messages when detection + // takes a non-trivial amount of time. + previewSize?.let { + camera.addCallbackBuffer(createPreviewBuffer(it)) + camera.addCallbackBuffer(createPreviewBuffer(it)) + camera.addCallbackBuffer(createPreviewBuffer(it)) + camera.addCallbackBuffer(createPreviewBuffer(it)) + } + + return camera + } + + @Throws(IOException::class) + private fun setPreviewAndPictureSize(cameraSource: CameraSource, parameters: Parameters) { + + // Gives priority to the preview size specified by the user if exists. + val sizePair: CameraSizePair = PreferenceUtils.getUserSpecifiedPreviewSize(context) ?: run { + // Camera preview size is based on the landscape mode, so we need to also use the aspect + // ration of display in the same mode for comparison. + val displayAspectRatioInLandscape: Float = + if (Utils.isPortraitMode(graphicOverlay.context)) { + graphicOverlay.height.toFloat() / graphicOverlay.width + } else { + graphicOverlay.width.toFloat() / graphicOverlay.height + } + selectSizePair(cameraSource, displayAspectRatioInLandscape) + } ?: throw IOException("Could not find suitable preview size.") + + previewSize = sizePair.preview.also { + Log.v(TAG, "Camera preview size: $it") + parameters.setPreviewSize(it.width, it.height) + PreferenceUtils.saveStringPreference(context, R.string.pref_key_rear_camera_preview_size, it.toString()) + } + + sizePair.picture?.let { pictureSize -> + Log.v(TAG, "Camera picture size: $pictureSize") + parameters.setPictureSize(pictureSize.width, pictureSize.height) + PreferenceUtils.saveStringPreference( + context, R.string.pref_key_rear_camera_picture_size, pictureSize.toString() + ) + } + } + + /** + * Calculates the correct rotation for the given camera id and sets the rotation in the + * parameters. It also sets the camera's display orientation and rotation. + * + * @param parameters the camera parameters for which to set the rotation. + */ + private fun setRotation(camera: Camera, parameters: Parameters) { + val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager + val degrees = when (val deviceRotation = windowManager.defaultDisplay.rotation) { + Surface.ROTATION_0 -> 0 + Surface.ROTATION_90 -> 90 + Surface.ROTATION_180 -> 180 + Surface.ROTATION_270 -> 270 + else -> { + Log.e(TAG, "Bad device rotation value: $deviceRotation") + 0 + } + } + + val cameraInfo = CameraInfo() + Camera.getCameraInfo(CAMERA_FACING_BACK, cameraInfo) + val angle = (cameraInfo.orientation - degrees + 360) % 360 + // This corresponds to the rotation constants in FirebaseVisionImageMetadata. + this.rotationDegrees = angle + camera.setDisplayOrientation(angle) + parameters.setRotation(angle) + } + + /** + * Creates one buffer for the camera preview callback. The size of the buffer is based off of the + * camera preview size and the format of the camera image. + * + * @return a new preview buffer of the appropriate size for the current camera settings. 
+ */ + private fun createPreviewBuffer(previewSize: Size): ByteArray { + val bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT) + val sizeInBits = previewSize.height.toLong() * previewSize.width.toLong() * bitsPerPixel.toLong() + val bufferSize = ceil(sizeInBits / 8.0).toInt() + 1 + + // Creating the byte array this way and wrapping it, as opposed to using .allocate(), + // should guarantee that there will be an array to work with. + val byteArray = ByteArray(bufferSize) + val byteBuffer = ByteBuffer.wrap(byteArray) + check(!(!byteBuffer.hasArray() || !byteBuffer.array().contentEquals(byteArray))) { + // This should never happen. If it does, then we wouldn't be passing the preview content to + // the underlying detector later. + "Failed to create valid buffer for camera source." + } + + bytesToByteBuffer[byteArray] = byteBuffer + return byteArray + } + + //Camera Source overrides + + override fun getSupportedPreviewSizes(): Array = parameters?.supportedPreviewSizes + ?.map { Size(it.width, it.height) }?.toTypedArray() ?: emptyArray() + + override fun getSupportedPictureSizes(): Array = parameters?.supportedPictureSizes + ?.map { Size(it.width, it.height) }?.toTypedArray() ?: emptyArray() + + override fun setFrameProcessor(processor: FrameProcessor) { + graphicOverlay.clear() + synchronized(processorLock) { + frameProcessor?.stop() + frameProcessor = processor + } + } + + override fun setFlashStatus(status: Boolean) { + if (status){ + updateFlashMode(Parameters.FLASH_MODE_ON) + } + else{ + updateFlashMode(Parameters.FLASH_MODE_OFF) + } + } + + override fun getSelectedPreviewSize(): Size? = previewSize + + override fun getSelectedPictureSize(): Size? = null + + @Synchronized + override fun start(surfaceHolder: SurfaceHolder) { + if (camera != null) return + + camera = createCamera().apply { + setPreviewDisplay(surfaceHolder) + startPreview() + } + + processingThread = Thread(processingRunnable).apply { + processingRunnable.setActive(true) + start() + } + } + + @Synchronized + override fun stop() { + processingRunnable.setActive(false) + processingThread?.let { + try { + // Waits for the thread to complete to ensure that we can't have multiple threads executing + // at the same time (i.e., which would happen if we called start too quickly after stop). + it.join() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing thread interrupted on stop.") + } + processingThread = null + } + + camera?.let { + it.stopPreview() + it.setPreviewCallbackWithBuffer(null) + try { + it.setPreviewDisplay(null) + } catch (e: Exception) { + Log.e(TAG, "Failed to clear camera preview: $e") + } + it.release() + camera = null + } + + // Release the reference to any image buffers, since these will no longer be in use. + bytesToByteBuffer.clear() + } + + override fun release() { + graphicOverlay.clear() + synchronized(processorLock) { + stop() + frameProcessor?.stop() + } + } + + /** + * This runnable controls access to the underlying receiver, calling it to process frames when + * available from the camera. This is designed to run detection on frames as fast as possible + * (i.e., without unnecessary context switching or waiting on the next frame). + * + * + * While detection is running on a frame, new frames may be received from the camera. As these + * frames come in, the most recent frame is held onto as pending. As soon as detection and its + * associated processing is done for the previous frame, detection on the mostly recently received + * frame will immediately start on the same thread. 
+ */ + private inner class FrameProcessingRunnable : Runnable { + + // This lock guards all of the member variables below. + private val lock = Object() + private var active = true + + // These pending variables hold the state associated with the new frame awaiting processing. + private var pendingFrameData: ByteBuffer? = null + + /** Marks the runnable as active/not active. Signals any blocked threads to continue. */ + fun setActive(active: Boolean) { + synchronized(lock) { + this.active = active + lock.notifyAll() + } + } + + /** + * Sets the frame data received from the camera. This adds the previous unused frame buffer (if + * present) back to the camera, and keeps a pending reference to the frame data for future use. + */ + fun setNextFrame(data: ByteArray, camera: Camera) { + synchronized(lock) { + pendingFrameData?.let { + camera.addCallbackBuffer(it.array()) + pendingFrameData = null + } + + if (!bytesToByteBuffer.containsKey(data)) { + Log.d( + TAG, + "Skipping frame. Could not find ByteBuffer associated with the image data from the camera." + ) + return + } + + pendingFrameData = bytesToByteBuffer[data] + + // Notify the processor thread if it is waiting on the next frame (see below). + lock.notifyAll() + } + } + + /** + * As long as the processing thread is active, this executes detection on frames continuously. + * The next pending frame is either immediately available or hasn't been received yet. Once it + * is available, we transfer the frame info to local variables and run detection on that frame. + * It immediately loops back for the next frame without pausing. + * + * + * If detection takes longer than the time in between new frames from the camera, this will + * mean that this loop will run without ever waiting on a frame, avoiding any context switching + * or frame acquisition time latency. + * + * + * If you find that this is using more CPU than you'd like, you should probably decrease the + * FPS setting above to allow for some idle time in between frames. + */ + override fun run() { + var data: ByteBuffer? + + while (true) { + synchronized(lock) { + while (active && pendingFrameData == null) { + try { + // Wait for the next frame to be received from the camera, since we don't have it yet. + lock.wait() + } catch (e: InterruptedException) { + Log.e(TAG, "Frame processing loop terminated.", e) + return + } + } + + if (!active) { + // Exit the loop once this camera source is stopped or released. We check this here, + // immediately after the wait() above, to handle the case where setActive(false) had + // been called, triggering the termination of this loop. + return + } + + // Hold onto the frame data locally, so that we can use this for detection + // below. We need to clear pendingFrameData to ensure that this buffer isn't + // recycled back to the camera before we are done using that data. 
+                    data = pendingFrameData
+                    pendingFrameData = null
+                }
+
+                try {
+                    synchronized(processorLock) {
+                        val frameMetadata = FrameMetadata(previewSize!!.width, previewSize!!.height, rotationDegrees)
+                        data?.let {
+                            frameProcessor?.process(it, frameMetadata, graphicOverlay)
+                        }
+                    }
+                } catch (t: Exception) {
+                    Log.e(TAG, "Exception thrown from receiver.", t)
+                } finally {
+                    data?.let {
+                        camera?.addCallbackBuffer(it.array())
+                    }
+                }
+            }
+        }
+    }
+
+    companion object {
+
+        const val CAMERA_FACING_BACK = CameraInfo.CAMERA_FACING_BACK
+
+        private const val TAG = "CameraAPISource"
+
+        private const val IMAGE_FORMAT = ImageFormat.NV21
+        private const val MIN_CAMERA_PREVIEW_WIDTH = 400
+        private const val MAX_CAMERA_PREVIEW_WIDTH = 1300
+        private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 640
+        private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360
+        private const val REQUESTED_CAMERA_FPS = 30.0f
+
+        /**
+         * Selects the most suitable preview and picture size, given the display aspect ratio in landscape
+         * mode.
+         *
+         *
+         * It first tries to pick the size whose aspect ratio is closest to the display view's, with its
+         * width in the range [[MIN_CAMERA_PREVIEW_WIDTH], [MAX_CAMERA_PREVIEW_WIDTH]]. If there are
+         * multiple candidates, it chooses the one with the longest width.
+         *
+         *
+         * If that lookup fails, it chooses the size that has the minimum sum of the differences
+         * between the desired values and the actual values for width and height.
+         *
+         *
+         * Even though we only need to find the preview size, it's necessary to find both the preview
+         * size and the picture size of the camera together, because these need to have the same aspect
+         * ratio. On some hardware, setting only the preview size yields a distorted image.
+         *
+         * @param cameraSource the camera source to select a preview size from
+         * @return the selected preview and picture size pair
+         */
+        private fun selectSizePair(cameraSource: CameraSource, displayAspectRatioInLandscape: Float): CameraSizePair? {
+            val validPreviewSizes = Utils.generateValidPreviewSizeList(cameraSource)
+
+            var selectedPair: CameraSizePair? = null
+            // Picks the preview size that has closest aspect ratio to display view.
+            var minAspectRatioDiff = Float.MAX_VALUE
+
+            for (sizePair in validPreviewSizes) {
+                val previewSize = sizePair.preview
+                if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) {
+                    continue
+                }
+
+                val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat()
+                val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio)
+                if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) {
+                    if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) {
+                        selectedPair = sizePair
+                    }
+                } else if (aspectRatioDiff < minAspectRatioDiff) {
+                    minAspectRatioDiff = aspectRatioDiff
+                    selectedPair = sizePair
+                }
+            }
+
+            if (selectedPair == null) {
+                // Picks the one that has the minimum sum of the differences between the desired values and
+                // the actual values for width and height.
+ var minDiff = Integer.MAX_VALUE + for (sizePair in validPreviewSizes) { + val size = sizePair.preview + val diff = + abs(size.width - DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH) + + abs(size.height - DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT) + if (diff < minDiff) { + selectedPair = sizePair + minDiff = diff + } + } + } + + return selectedPair + } + + /** + * Selects the most suitable preview frames per second range. + * + * @param camera the camera to select a frames per second range from + * @return the selected preview frames per second range + */ + private fun selectPreviewFpsRange(camera: Camera): IntArray? { + // The camera API uses integers scaled by a factor of 1000 instead of floating-point frame + // rates. + val desiredPreviewFpsScaled = (REQUESTED_CAMERA_FPS * 1000f).toInt() + + // The method for selecting the best range is to minimize the sum of the differences between + // the desired value and the upper and lower bounds of the range. This may select a range + // that the desired value is outside of, but this is often preferred. For example, if the + // desired frame rate is 29.97, the range (30, 30) is probably more desirable than the + // range (15, 30). + var selectedFpsRange: IntArray? = null + var minDiff = Integer.MAX_VALUE + for (range in camera.parameters.supportedPreviewFpsRange) { + val deltaMin = desiredPreviewFpsScaled - range[Parameters.PREVIEW_FPS_MIN_INDEX] + val deltaMax = desiredPreviewFpsScaled - range[Parameters.PREVIEW_FPS_MAX_INDEX] + val diff = abs(deltaMin) + abs(deltaMax) + if (diff < minDiff) { + selectedFpsRange = range + minDiff = diff + } + } + return selectedFpsRange + } + } +} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt index 5444268654..c2a83348df 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt @@ -16,90 +16,50 @@ package com.google.mlkit.md.camera -import android.content.Context -import android.graphics.ImageFormat -import android.hardware.Camera -import android.hardware.Camera.CameraInfo -import android.hardware.Camera.Parameters -import android.util.Log import android.util.Size -import android.view.Surface import android.view.SurfaceHolder -import android.view.WindowManager -import com.google.mlkit.md.R -import com.google.mlkit.md.Utils -import com.google.mlkit.md.settings.PreferenceUtils -import java.io.IOException -import java.nio.ByteBuffer -import java.util.IdentityHashMap -import kotlin.math.abs -import kotlin.math.ceil -/** - * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics). This - * receives preview frames from the camera at a specified rate, sends those frames to detector as - * fast as it is able to process. - * - * - * This camera source makes a best effort to manage processing on preview frames as fast as - * possible, while at the same time minimizing lag. As such, frames may be dropped if the detector - * is unable to keep up with the rate of frames generated by the camera. - */ -@Suppress("DEPRECATION") -class CameraSource(private val graphicOverlay: GraphicOverlay) { +abstract class CameraSource { - private var camera: Camera? 
 = null
-    private var rotationDegrees: Int = 0
+    /**
+     * Returns the preview [Size]s supported by the camera.
+     */
+    abstract fun getSupportedPreviewSizes(): Array<Size>
 
-    /** Returns the preview size that is currently in use by the underlying camera. */
-    internal var previewSize: Size? = null
-        private set
+    /**
+     * Returns the picture [Size]s supported by the camera.
+     */
+    abstract fun getSupportedPictureSizes(): Array<Size>
+
+    /**
+     * Sets the [FrameProcessor] instance used to process the frames returned by the camera.
+     */
+    abstract fun setFrameProcessor(processor: FrameProcessor)
 
     /**
-     * Dedicated thread and associated runnable for calling into the detector with frames, as the
-     * frames become available from the camera.
+     * Turns the flash on ([status] = true) or off ([status] = false).
      */
-    private var processingThread: Thread? = null
-    private val processingRunnable = FrameProcessingRunnable()
+    abstract fun setFlashStatus(status: Boolean)
 
-    private val processorLock = Object()
-    private var frameProcessor: FrameProcessor? = null
+    /**
+     * Returns the preview [Size] selected by the camera.
+     */
+    internal abstract fun getSelectedPreviewSize(): Size?
 
     /**
-     * Map to convert between a byte array, received from the camera, and its associated byte buffer.
-     * We use byte buffers internally because this is a more efficient way to call into native code
-     * later (avoids a potential copy).
-     *
-     *
-     * **Note:** uses IdentityHashMap here instead of HashMap because the behavior of an array's
-     * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces
-     * identity ('==') check on the keys.
+     * Returns the picture [Size] selected by the camera.
      */
-    private val bytesToByteBuffer = IdentityHashMap<ByteArray, ByteBuffer>()
-    private val context: Context = graphicOverlay.context
+    internal abstract fun getSelectedPictureSize(): Size?
 
     /**
      * Opens the camera and starts sending preview frames to the underlying detector. The supplied
      * surface holder is used for the preview so frames can be displayed to the user.
      *
      * @param surfaceHolder the surface holder to use for the preview frames.
-     * @throws IOException if the supplied surface holder could not be used as the preview display.
+     * @throws Exception if the supplied surface holder could not be used as the preview display.
      */
-    @Synchronized
-    @Throws(IOException::class)
-    internal fun start(surfaceHolder: SurfaceHolder) {
-        if (camera != null) return
-
-        camera = createCamera().apply {
-            setPreviewDisplay(surfaceHolder)
-            startPreview()
-        }
-
-        processingThread = Thread(processingRunnable).apply {
-            processingRunnable.setActive(true)
-            start()
-        }
-    }
+    @Throws(Exception::class)
+    internal abstract fun start(surfaceHolder: SurfaceHolder)
 
     /**
      * Closes the camera and stops sending frames to the underlying frame detector.
@@ -111,417 +71,12 @@ class CameraSource(private val graphicOverlay: GraphicOverlay) {
      * Call [.release] instead to completely shut down this camera source and release the
      * resources of the underlying detector.
      */
-    @Synchronized
-    internal fun stop() {
-        processingRunnable.setActive(false)
-        processingThread?.let {
-            try {
-                // Waits for the thread to complete to ensure that we can't have multiple threads executing
-                // at the same time (i.e., which would happen if we called start too quickly after stop).
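For orientation, a hedged sketch of how a host screen is expected to drive this abstract contract; the activity name is invented here, while the real wiring lives in the Live*Activity classes and CameraSourcePreview shown later in the series:

import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity

// Illustrative host only: start while visible, stop when backgrounded, release once at teardown.
class HostActivity : AppCompatActivity() {

    private var cameraSource: CameraSource? = null
    private lateinit var preview: CameraSourcePreview // assumed to be bound from the layout

    override fun onResume() {
        super.onResume()
        // The preview invokes CameraSource.start(surfaceHolder) once its surface is available.
        cameraSource?.let { preview.start(it) }
    }

    override fun onPause() {
        preview.stop() // joins the frame-processing thread and closes the camera
        super.onPause()
    }

    override fun onDestroy() {
        cameraSource?.release() // also stops the attached FrameProcessor
        cameraSource = null
        super.onDestroy()
    }
}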
- it.join() - } catch (e: InterruptedException) { - Log.e(TAG, "Frame processing thread interrupted on stop.") - } - processingThread = null - } - - camera?.let { - it.stopPreview() - it.setPreviewCallbackWithBuffer(null) - try { - it.setPreviewDisplay(null) - } catch (e: Exception) { - Log.e(TAG, "Failed to clear camera preview: $e") - } - it.release() - camera = null - } - - // Release the reference to any image buffers, since these will no longer be in use. - bytesToByteBuffer.clear() - } - - /** Stops the camera and releases the resources of the camera and underlying detector. */ - fun release() { - graphicOverlay.clear() - synchronized(processorLock) { - stop() - frameProcessor?.stop() - } - } - - fun setFrameProcessor(processor: FrameProcessor) { - graphicOverlay.clear() - synchronized(processorLock) { - frameProcessor?.stop() - frameProcessor = processor - } - } - - fun updateFlashMode(flashMode: String) { - val parameters = camera?.parameters - parameters?.flashMode = flashMode - camera?.parameters = parameters - } - - /** - * Opens the camera and applies the user settings. - * - * @throws IOException if camera cannot be found or preview cannot be processed. - */ - @Throws(IOException::class) - private fun createCamera(): Camera { - val camera = Camera.open() ?: throw IOException("There is no back-facing camera.") - val parameters = camera.parameters - setPreviewAndPictureSize(camera, parameters) - setRotation(camera, parameters) - - val previewFpsRange = selectPreviewFpsRange(camera) - ?: throw IOException("Could not find suitable preview frames per second range.") - parameters.setPreviewFpsRange( - previewFpsRange[Parameters.PREVIEW_FPS_MIN_INDEX], - previewFpsRange[Parameters.PREVIEW_FPS_MAX_INDEX] - ) - - parameters.previewFormat = IMAGE_FORMAT - - if (parameters.supportedFocusModes.contains(Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { - parameters.focusMode = Parameters.FOCUS_MODE_CONTINUOUS_VIDEO - } else { - Log.i(TAG, "Camera auto focus is not supported on this device.") - } - - camera.parameters = parameters - - camera.setPreviewCallbackWithBuffer(processingRunnable::setNextFrame) - - // Four frame buffers are needed for working with the camera: - // - // one for the frame that is currently being executed upon in doing detection - // one for the next pending frame to process immediately upon completing detection - // two for the frames that the camera uses to populate future preview images - // - // Through trial and error it appears that two free buffers, in addition to the two buffers - // used in this code, are needed for the camera to work properly. Perhaps the camera has one - // thread for acquiring images, and another thread for calling into user code. If only three - // buffers are used, then the camera will spew thousands of warning messages when detection - // takes a non-trivial amount of time. - previewSize?.let { - camera.addCallbackBuffer(createPreviewBuffer(it)) - camera.addCallbackBuffer(createPreviewBuffer(it)) - camera.addCallbackBuffer(createPreviewBuffer(it)) - camera.addCallbackBuffer(createPreviewBuffer(it)) - } - - return camera - } - - @Throws(IOException::class) - private fun setPreviewAndPictureSize(camera: Camera, parameters: Parameters) { - - // Gives priority to the preview size specified by the user if exists. 
- val sizePair: CameraSizePair = PreferenceUtils.getUserSpecifiedPreviewSize(context) ?: run { - // Camera preview size is based on the landscape mode, so we need to also use the aspect - // ration of display in the same mode for comparison. - val displayAspectRatioInLandscape: Float = - if (Utils.isPortraitMode(graphicOverlay.context)) { - graphicOverlay.height.toFloat() / graphicOverlay.width - } else { - graphicOverlay.width.toFloat() / graphicOverlay.height - } - selectSizePair(camera, displayAspectRatioInLandscape) - } ?: throw IOException("Could not find suitable preview size.") - - previewSize = sizePair.preview.also { - Log.v(TAG, "Camera preview size: $it") - parameters.setPreviewSize(it.width, it.height) - PreferenceUtils.saveStringPreference(context, R.string.pref_key_rear_camera_preview_size, it.toString()) - } - - sizePair.picture?.let { pictureSize -> - Log.v(TAG, "Camera picture size: $pictureSize") - parameters.setPictureSize(pictureSize.width, pictureSize.height) - PreferenceUtils.saveStringPreference( - context, R.string.pref_key_rear_camera_picture_size, pictureSize.toString() - ) - } - } - - /** - * Calculates the correct rotation for the given camera id and sets the rotation in the - * parameters. It also sets the camera's display orientation and rotation. - * - * @param parameters the camera parameters for which to set the rotation. - */ - private fun setRotation(camera: Camera, parameters: Parameters) { - val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager - val degrees = when (val deviceRotation = windowManager.defaultDisplay.rotation) { - Surface.ROTATION_0 -> 0 - Surface.ROTATION_90 -> 90 - Surface.ROTATION_180 -> 180 - Surface.ROTATION_270 -> 270 - else -> { - Log.e(TAG, "Bad device rotation value: $deviceRotation") - 0 - } - } - - val cameraInfo = CameraInfo() - Camera.getCameraInfo(CAMERA_FACING_BACK, cameraInfo) - val angle = (cameraInfo.orientation - degrees + 360) % 360 - this.rotationDegrees = angle - camera.setDisplayOrientation(angle) - parameters.setRotation(angle) - } - - /** - * Creates one buffer for the camera preview callback. The size of the buffer is based off of the - * camera preview size and the format of the camera image. - * - * @return a new preview buffer of the appropriate size for the current camera settings. - */ - private fun createPreviewBuffer(previewSize: Size): ByteArray { - val bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT) - val sizeInBits = previewSize.height.toLong() * previewSize.width.toLong() * bitsPerPixel.toLong() - val bufferSize = ceil(sizeInBits / 8.0).toInt() + 1 - - // Creating the byte array this way and wrapping it, as opposed to using .allocate(), - // should guarantee that there will be an array to work with. - val byteArray = ByteArray(bufferSize) - val byteBuffer = ByteBuffer.wrap(byteArray) - check(!(!byteBuffer.hasArray() || !byteBuffer.array()!!.contentEquals(byteArray))) { - // This should never happen. If it does, then we wouldn't be passing the preview content to - // the underlying detector later. - "Failed to create valid buffer for camera source." - } - - bytesToByteBuffer[byteArray] = byteBuffer - return byteArray - } + @Throws(Exception::class) + internal abstract fun stop() /** - * This runnable controls access to the underlying receiver, calling it to process frames when - * available from the camera. This is designed to run detection on frames as fast as possible - * (i.e., without unnecessary context switching or waiting on the next frame). 
- * - * - * While detection is running on a frame, new frames may be received from the camera. As these - * frames come in, the most recent frame is held onto as pending. As soon as detection and its - * associated processing is done for the previous frame, detection on the mostly recently received - * frame will immediately start on the same thread. + * Stops the camera and releases the resources of the camera and underlying detector. */ - private inner class FrameProcessingRunnable internal constructor() : Runnable { - - // This lock guards all of the member variables below. - private val lock = Object() - private var active = true - - // These pending variables hold the state associated with the new frame awaiting processing. - private var pendingFrameData: ByteBuffer? = null - - /** Marks the runnable as active/not active. Signals any blocked threads to continue. */ - internal fun setActive(active: Boolean) { - synchronized(lock) { - this.active = active - lock.notifyAll() - } - } - - /** - * Sets the frame data received from the camera. This adds the previous unused frame buffer (if - * present) back to the camera, and keeps a pending reference to the frame data for future use. - */ - internal fun setNextFrame(data: ByteArray, camera: Camera) { - synchronized(lock) { - pendingFrameData?.let { - camera.addCallbackBuffer(it.array()) - pendingFrameData = null - } - - if (!bytesToByteBuffer.containsKey(data)) { - Log.d( - TAG, - "Skipping frame. Could not find ByteBuffer associated with the image data from the camera." - ) - return - } - - pendingFrameData = bytesToByteBuffer[data] - - // Notify the processor thread if it is waiting on the next frame (see below). - lock.notifyAll() - } - } - - /** - * As long as the processing thread is active, this executes detection on frames continuously. - * The next pending frame is either immediately available or hasn't been received yet. Once it - * is available, we transfer the frame info to local variables and run detection on that frame. - * It immediately loops back for the next frame without pausing. - * - * - * If detection takes longer than the time in between new frames from the camera, this will - * mean that this loop will run without ever waiting on a frame, avoiding any context switching - * or frame acquisition time latency. - * - * - * If you find that this is using more CPU than you'd like, you should probably decrease the - * FPS setting above to allow for some idle time in between frames. - */ - override fun run() { - var data: ByteBuffer? - - while (true) { - synchronized(lock) { - while (active && pendingFrameData == null) { - try { - // Wait for the next frame to be received from the camera, since we don't have it yet. - lock.wait() - } catch (e: InterruptedException) { - Log.e(TAG, "Frame processing loop terminated.", e) - return - } - } - - if (!active) { - // Exit the loop once this camera source is stopped or released. We check this here, - // immediately after the wait() above, to handle the case where setActive(false) had - // been called, triggering the termination of this loop. - return - } - - // Hold onto the frame data locally, so that we can use this for detection - // below. We need to clear pendingFrameData to ensure that this buffer isn't - // recycled back to the camera before we are done using that data. 
- data = pendingFrameData - pendingFrameData = null - } - - try { - synchronized(processorLock) { - val frameMetadata = FrameMetadata(previewSize!!.width, previewSize!!.height, rotationDegrees) - data?.let { - frameProcessor?.process(it, frameMetadata, graphicOverlay) - } - } - } catch (t: Exception) { - Log.e(TAG, "Exception thrown from receiver.", t) - } finally { - data?.let { - camera?.addCallbackBuffer(it.array()) - } - } - } - } - } - - companion object { - - const val CAMERA_FACING_BACK = CameraInfo.CAMERA_FACING_BACK - - private const val TAG = "CameraSource" - - private const val IMAGE_FORMAT = ImageFormat.NV21 - private const val MIN_CAMERA_PREVIEW_WIDTH = 400 - private const val MAX_CAMERA_PREVIEW_WIDTH = 1300 - private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 640 - private const val DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360 - private const val REQUESTED_CAMERA_FPS = 30.0f - - /** - * Selects the most suitable preview and picture size, given the display aspect ratio in landscape - * mode. - * - * - * It's firstly trying to pick the one that has closest aspect ratio to display view with its - * width be in the specified range [[.MIN_CAMERA_PREVIEW_WIDTH], [ ][.MAX_CAMERA_PREVIEW_WIDTH]]. If there're multiple candidates, choose the one having longest - * width. - * - * - * If the above looking up failed, chooses the one that has the minimum sum of the differences - * between the desired values and the actual values for width and height. - * - * - * Even though we only need to find the preview size, it's necessary to find both the preview - * size and the picture size of the camera together, because these need to have the same aspect - * ratio. On some hardware, if you would only set the preview size, you will get a distorted - * image. - * - * @param camera the camera to select a preview size from - * @return the selected preview and picture size pair - */ - private fun selectSizePair(camera: Camera, displayAspectRatioInLandscape: Float): CameraSizePair? { - val validPreviewSizes = Utils.generateValidPreviewSizeList(camera) - - var selectedPair: CameraSizePair? = null - // Picks the preview size that has closest aspect ratio to display view. - var minAspectRatioDiff = Float.MAX_VALUE - - for (sizePair in validPreviewSizes) { - val previewSize = sizePair.preview - if (previewSize.width < MIN_CAMERA_PREVIEW_WIDTH || previewSize.width > MAX_CAMERA_PREVIEW_WIDTH) { - continue - } - - val previewAspectRatio = previewSize.width.toFloat() / previewSize.height.toFloat() - val aspectRatioDiff = abs(displayAspectRatioInLandscape - previewAspectRatio) - if (abs(aspectRatioDiff - minAspectRatioDiff) < Utils.ASPECT_RATIO_TOLERANCE) { - if (selectedPair == null || selectedPair.preview.width < sizePair.preview.width) { - selectedPair = sizePair - } - } else if (aspectRatioDiff < minAspectRatioDiff) { - minAspectRatioDiff = aspectRatioDiff - selectedPair = sizePair - } - } - - if (selectedPair == null) { - // Picks the one that has the minimum sum of the differences between the desired values and - // the actual values for width and height. - var minDiff = Integer.MAX_VALUE - for (sizePair in validPreviewSizes) { - val size = sizePair.preview - val diff = - abs(size.width - DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH) + - abs(size.height - DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT) - if (diff < minDiff) { - selectedPair = sizePair - minDiff = diff - } - } - } - - return selectedPair - } - - /** - * Selects the most suitable preview frames per second range. 
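The range-selection rule used by selectPreviewFpsRange (minimize the summed distance between the desired rate, scaled by 1000, and each range's bounds) is easiest to see with numbers. A small self-contained check, with made-up candidate ranges:

import kotlin.math.abs

fun main() {
    val desired = (30.0f * 1000f).toInt() // the Camera API scales fps by 1000 -> 30000
    // Hypothetical ranges a device might report, as [min, max] scaled by 1000.
    val candidates = listOf(intArrayOf(15000, 30000), intArrayOf(30000, 30000), intArrayOf(7000, 25000))
    val best = candidates.minByOrNull { abs(desired - it[0]) + abs(desired - it[1]) }
    // Sums are 15000, 0 and 28000 respectively, so the fixed [30000, 30000] range wins:
    // a steady 30 fps is preferred over the wider 15-30 fps range.
    println(best?.joinToString()) // 30000, 30000
}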
- * - * @param camera the camera to select a frames per second range from - * @return the selected preview frames per second range - */ - private fun selectPreviewFpsRange(camera: Camera): IntArray? { - // The camera API uses integers scaled by a factor of 1000 instead of floating-point frame - // rates. - val desiredPreviewFpsScaled = (REQUESTED_CAMERA_FPS * 1000f).toInt() + abstract fun release() - // The method for selecting the best range is to minimize the sum of the differences between - // the desired value and the upper and lower bounds of the range. This may select a range - // that the desired value is outside of, but this is often preferred. For example, if the - // desired frame rate is 29.97, the range (30, 30) is probably more desirable than the - // range (15, 30). - var selectedFpsRange: IntArray? = null - var minDiff = Integer.MAX_VALUE - for (range in camera.parameters.supportedPreviewFpsRange) { - val deltaMin = desiredPreviewFpsScaled - range[Parameters.PREVIEW_FPS_MIN_INDEX] - val deltaMax = desiredPreviewFpsScaled - range[Parameters.PREVIEW_FPS_MAX_INDEX] - val diff = abs(deltaMin) + abs(deltaMax) - if (diff < minDiff) { - selectedFpsRange = range - minDiff = diff - } - } - return selectedFpsRange - } - } -} +} \ No newline at end of file diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt new file mode 100644 index 0000000000..57bb69392e --- /dev/null +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt @@ -0,0 +1,39 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.mlkit.md.camera + +import android.hardware.camera2.CameraCharacteristics +import android.hardware.camera2.CameraMetadata +import android.util.Log + +object CameraSourceFactory { + + const val TAG = "CameraSourceFactory" + + fun createCameraSource(graphicOverlay: GraphicOverlay): CameraSource { + val characteristics = Camera2APISource.getCameraCharacteristics(graphicOverlay.context) + val halSupport = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) + return if (halSupport == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY){ + Log.d(TAG, "Camera API source used") + CameraAPISource(graphicOverlay) + } else { + Log.d(TAG, "Camera2 API source used") + Camera2APISource(graphicOverlay) + } + } + +} \ No newline at end of file diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt index 2aeb5e9571..1157d3259e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourcePreview.kt @@ -23,9 +23,12 @@ import android.util.Size import android.view.SurfaceHolder import android.view.SurfaceView import android.widget.FrameLayout +import androidx.annotation.MainThread import com.google.mlkit.md.R import com.google.mlkit.md.Utils import java.io.IOException +import kotlin.math.abs +import kotlin.math.roundToInt /** Preview the camera image in the screen. */ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) { @@ -45,13 +48,16 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c graphicOverlay = findViewById(R.id.camera_preview_graphic_overlay) } - @Throws(IOException::class) + @MainThread + @Throws(Exception::class) fun start(cameraSource: CameraSource) { this.cameraSource = cameraSource startRequested = true startIfReady() } + @MainThread + @Throws(Exception::class) fun stop() { cameraSource?.let { it.stop() @@ -60,16 +66,17 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c } } - @Throws(IOException::class) + @Throws(Exception::class) private fun startIfReady() { if (startRequested && surfaceAvailable) { - cameraSource?.start(surfaceView.holder) - requestLayout() - graphicOverlay?.let { overlay -> - cameraSource?.let { - overlay.setCameraInfo(it) + Log.d(TAG, "Starting camera") + cameraSource?.apply { + start(surfaceView.holder) + requestLayout() + graphicOverlay?.let { + it.setCameraInfo(this) + it.clear() } - overlay.clear() } startRequested = false } @@ -79,7 +86,7 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c val layoutWidth = right - left val layoutHeight = bottom - top - cameraSource?.previewSize?.let { cameraPreviewSize = it } + cameraSource?.getSelectedPreviewSize()?.let { cameraPreviewSize = it } val previewSizeRatio = cameraPreviewSize?.let { size -> if (Utils.isPortraitMode(context)) { @@ -88,38 +95,37 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c } else { size.width.toFloat() / size.height } - } ?: layoutWidth.toFloat() / layoutHeight.toFloat() - - // Match the width of the child view to its parent. 
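The CameraSourceFactory above routes LEGACY-level hardware to the old Camera API source. As a standalone illustration of that probe (the helper name is ours; Camera2APISource.getCameraCharacteristics presumably wraps something similar):

import android.content.Context
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraManager
import android.hardware.camera2.CameraMetadata

// Returns true when the back camera only offers a LEGACY Camera2 HAL, where the
// deprecated android.hardware.Camera path is usually the safer choice.
fun isLegacyHal(context: Context): Boolean {
    val manager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
    val backId = manager.cameraIdList.firstOrNull { id ->
        manager.getCameraCharacteristics(id)
            .get(CameraCharacteristics.LENS_FACING) == CameraMetadata.LENS_FACING_BACK
    } ?: return false
    val level = manager.getCameraCharacteristics(backId)
        .get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
    return level == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
}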
- val childHeight = (layoutWidth / previewSizeRatio).toInt() - if (childHeight <= layoutHeight) { - for (i in 0 until childCount) { - getChildAt(i).layout(0, 0, layoutWidth, childHeight) - } + } ?: (layoutWidth.toFloat() / layoutHeight.toFloat()) + + //Calculate the new surface view size by scaling the layout width/height based on aspect ratio + val newLayoutWidth: Int + val newLayoutHeight: Int + if (width < height * previewSizeRatio) { + newLayoutHeight = height + newLayoutWidth = (height * previewSizeRatio).roundToInt() } else { - // When the child view is too tall to be fitted in its parent: If the child view is - // static overlay view container (contains views such as bottom prompt chip), we apply - // the size of the parent view to it. Otherwise, we offset the top/bottom position - // equally to position it in the center of the parent. - val excessLenInHalf = (childHeight - layoutHeight) / 2 - for (i in 0 until childCount) { - val childView = getChildAt(i) - when (childView.id) { - R.id.static_overlay_container -> { - childView.layout(0, 0, layoutWidth, layoutHeight) - } - else -> { - childView.layout( - 0, -excessLenInHalf, layoutWidth, layoutHeight + excessLenInHalf - ) - } - } + newLayoutWidth = width + newLayoutHeight = (width / previewSizeRatio).roundToInt() + } + + //Apply the new width & height to surface view only in a way that it should center crop the camera preview + val excessWidthInHalf = abs(newLayoutWidth - layoutWidth) / 2 + val excessHeightInHalf = abs(newLayoutHeight - layoutHeight) / 2 + surfaceView.layout( + -excessWidthInHalf, -excessHeightInHalf, newLayoutWidth - excessWidthInHalf, newLayoutHeight - excessHeightInHalf + ) + + //Apply the actual layout width & height to rest of its child views + for (i in 0 until childCount) { + val childView = getChildAt(i) + if (!childView.equals(surfaceView)){ + childView.layout(0, 0, layoutWidth, layoutHeight) } } try { startIfReady() - } catch (e: IOException) { + } catch (e: Exception) { Log.e(TAG, "Could not start camera source.", e) } } @@ -129,7 +135,7 @@ class CameraSourcePreview(context: Context, attrs: AttributeSet) : FrameLayout(c surfaceAvailable = true try { startIfReady() - } catch (e: IOException) { + } catch (e: Exception) { Log.e(TAG, "Could not start camera source.", e) } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt deleted file mode 100644 index 555f5934f2..0000000000 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2Processor.kt +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.mlkit.md.camera - -import com.google.android.odml.image.MlImage - -/** An interface to process the input camera frame and perform detection on it. */ -interface Frame2Processor { - - /** Processes the input frame with the underlying detector. 
- * @return true if holding [MlImage] for processing otherwise return false */ - fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean - - /** Stops the underlying detector and release resources. */ - fun stop() -} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt deleted file mode 100644 index 582c691106..0000000000 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Frame2ProcessorBase.kt +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.mlkit.md.camera - -import androidx.annotation.GuardedBy -import com.google.android.gms.tasks.Task -import com.google.android.gms.tasks.TaskExecutors -import com.google.android.odml.image.MediaImageExtractor -import com.google.android.odml.image.MlImage -import com.google.mlkit.md.* - -/** Abstract base class of [FrameProcessor]. */ -abstract class Frame2ProcessorBase : Frame2Processor { - - // To keep the reference of current detection task - @GuardedBy("this") - private var currentDetectionTask: Task? = null - - private val executor = ScopedExecutor(TaskExecutors.MAIN_THREAD) - - @Synchronized - override fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean { - return processLatestFrame(image, graphicOverlay) - } - - @Synchronized - private fun processLatestFrame(frame: MlImage, graphicOverlay: GraphicOverlay): Boolean { - return if(currentDetectionTask?.isComplete == false){ - false - }else { - //val startMs = SystemClock.elapsedRealtime() - currentDetectionTask = detectInImage(frame).addOnCompleteListener(executor) { task -> - if (task.isSuccessful){ - //Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}") - MediaImageExtractor.extract(frame).let { - this@Frame2ProcessorBase.onSuccess(CameraInputInfo(it.planes[0].buffer, FrameMetadata(frame.width, - frame.height,frame.rotation)), task.result, graphicOverlay) - } - } - else{ - //Log.d(TAG, "Detect In Image Failure: ${e.message}") - this@Frame2ProcessorBase.onFailure(task.exception) - } - - //Close the processing frame - frame.close() - } - true - } - } - - override fun stop() { - executor.shutdown() - } - - protected abstract fun detectInImage(image: MlImage): Task - - /** Be called when the detection succeeds. */ - protected abstract fun onSuccess( - inputInfo: InputInfo, - results: T, - graphicOverlay: GraphicOverlay - ) - - protected abstract fun onFailure(e: Exception?) 
- - companion object { - private const val TAG = "FrameProcessorBase" - } -} diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessor.kt index 9f8143d6ff..8e83c3757e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessor.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessor.kt @@ -16,14 +16,20 @@ package com.google.mlkit.md.camera +import com.google.android.odml.image.MlImage import java.nio.ByteBuffer /** An interface to process the input camera frame and perform detection on it. */ interface FrameProcessor { /** Processes the input frame with the underlying detector. */ + @Deprecated("Keeping it only to support Camera API frame processing") fun process(data: ByteBuffer, frameMetadata: FrameMetadata, graphicOverlay: GraphicOverlay) + /** Processes the input frame with the underlying detector. + * @return true if holding [MlImage] for processing otherwise return false */ + fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean + /** Stops the underlying detector and release resources. */ fun stop() } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt index 4db20238f8..d9f2e208fa 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt @@ -22,8 +22,8 @@ import androidx.annotation.GuardedBy import com.google.android.gms.tasks.OnFailureListener import com.google.android.gms.tasks.Task import com.google.android.gms.tasks.TaskExecutors -import com.google.mlkit.md.addOnFailureListener -import com.google.mlkit.md.addOnSuccessListener +import com.google.android.odml.image.MediaImageExtractor +import com.google.android.odml.image.MlImage import com.google.mlkit.md.CameraInputInfo import com.google.mlkit.md.InputInfo import com.google.mlkit.md.ScopedExecutor @@ -46,8 +46,14 @@ abstract class FrameProcessorBase : FrameProcessor { @GuardedBy("this") private var processingFrameMetaData: FrameMetadata? = null + + // To keep the reference of current detection task + @GuardedBy("this") + private var currentDetectionTask: Task? 
= null + private val executor = ScopedExecutor(TaskExecutors.MAIN_THREAD) + @Deprecated("Keeping it only to support Camera API frame processing") @Synchronized override fun process( data: ByteBuffer, @@ -61,6 +67,7 @@ abstract class FrameProcessorBase : FrameProcessor { } } + @Deprecated("Keeping it only to support Camera API frame processing") @Synchronized private fun processLatestFrame(graphicOverlay: GraphicOverlay) { processingFrame = latestFrame @@ -83,13 +90,51 @@ abstract class FrameProcessorBase : FrameProcessor { this@FrameProcessorBase.onSuccess(CameraInputInfo(frame, frameMetaData), results, graphicOverlay) processLatestFrame(graphicOverlay) } - .addOnFailureListener(executor) { e -> OnFailureListener { this@FrameProcessorBase.onFailure(it) } } + .addOnFailureListener(executor) { e -> this@FrameProcessorBase.onFailure(e) } + } + + @Synchronized + override fun process(image: MlImage, graphicOverlay: GraphicOverlay): Boolean { + return processLatestFrame(image, graphicOverlay) + } + + @Synchronized + private fun processLatestFrame(frame: MlImage, graphicOverlay: GraphicOverlay): Boolean { + return if(currentDetectionTask?.isComplete == false){ + false + }else { + //val startMs = SystemClock.elapsedRealtime() + currentDetectionTask = detectInImage(frame).addOnCompleteListener(executor) { task -> + if (task.isSuccessful) { + //Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}") + MediaImageExtractor.extract(frame).let { + this@FrameProcessorBase.onSuccess( + CameraInputInfo( + it.planes[0].buffer, FrameMetadata( + frame.width, + frame.height, frame.rotation + ) + ), task.result, graphicOverlay + ) + } + } else { + //Log.d(TAG, "Detect In Image Failure: ${e.message}") + this@FrameProcessorBase.onFailure(task.exception) + } + + //Close the processing frame + frame.close() + } + true + } } override fun stop() { executor.shutdown() } + protected abstract fun detectInImage(image: MlImage): Task + @Deprecated("Keeping it only to support Camera API frame processing") protected abstract fun detectInImage(image: InputImage): Task /** Be called when the detection succeeds. */ @@ -99,7 +144,7 @@ abstract class FrameProcessorBase : FrameProcessor { graphicOverlay: GraphicOverlay ) - protected abstract fun onFailure(e: Exception) + protected abstract fun onFailure(e: Exception?) companion object { private const val TAG = "FrameProcessorBase" diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt index e402c7f363..c3669fb3c8 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt @@ -39,6 +39,7 @@ import java.util.ArrayList * Associated [Graphic] items should use [.translateX] and [ ][.translateY] to convert to view coordinate from the preview's coordinate. */ class GraphicOverlay(context: Context, attrs: AttributeSet) : View(context, attrs) { + private val lock = Any() private var previewWidth: Int = 0 @@ -78,25 +79,13 @@ class GraphicOverlay(context: Context, attrs: AttributeSet) : View(context, attr * Sets the camera attributes for size and facing direction, which informs how to transform image * coordinates later. */ - @Deprecated("This method is deprecated. 
Use setCameraInfo(cameraSource: Camera2Source) instead")
     fun setCameraInfo(cameraSource: CameraSource) {
-        val previewSize = cameraSource.previewSize ?: return
-        if (Utils.isPortraitMode(context)) {
-            // Swap width and height when in portrait, since camera's natural orientation is landscape.
-            previewWidth = previewSize.height
-            previewHeight = previewSize.width
-        } else {
-            previewWidth = previewSize.width
-            previewHeight = previewSize.height
-        }
-    }
-
-    /**
-     * Sets the camera attributes for size and facing direction, which informs how to transform image
-     * coordinates later.
-     */
-    fun setCameraInfo(cameraSource: Camera2Source) {
-        val previewSize = cameraSource.previewSize ?: return
+        // Prefer the picture size over the preview size. With the Camera2 API we always have to
+        // define an explicit size for the preview frames, and giving preference to the picture size
+        // (when it exists) fixes a barcode detection issue in cases where the picture size is much
+        // larger than the preview size (e.g. preview size: 1088 x 1088 vs. picture size: 3024 x 3024).
+        val previewSize = cameraSource.getSelectedPictureSize() ?: cameraSource.getSelectedPreviewSize() ?: return
         if (Utils.isPortraitMode(context)) {
             // Swap width and height when in portrait, since camera's natural orientation is landscape.
             previewWidth = previewSize.height
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/MultiObjectProcessor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/MultiObjectProcessor.kt
index d0f7382f52..d1b921ab4e 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/MultiObjectProcessor.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/MultiObjectProcessor.kt
@@ -23,6 +23,7 @@ import androidx.annotation.MainThread
 import androidx.core.util.forEach
 import androidx.core.util.set
 import com.google.android.gms.tasks.Task
+import com.google.android.odml.image.MlImage
 import com.google.mlkit.md.camera.CameraReticleAnimator
 import com.google.mlkit.md.camera.GraphicOverlay
 import com.google.mlkit.md.R
@@ -31,6 +32,7 @@ import com.google.mlkit.md.camera.FrameProcessorBase
 import com.google.mlkit.md.settings.PreferenceUtils
 import com.google.mlkit.common.model.LocalModel
 import com.google.mlkit.md.InputInfo
+import com.google.mlkit.vision.barcode.common.Barcode
 import com.google.mlkit.vision.common.InputImage
 import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions
 import com.google.mlkit.vision.objects.defaults.ObjectDetectorOptions
@@ -92,9 +94,10 @@ class MultiObjectProcessor(
         }
     }
 
-    override fun detectInImage(image: InputImage): Task<List<DetectedObject>> {
-        return detector.process(image)
-    }
+    override fun detectInImage(image: MlImage): Task<List<DetectedObject>> = detector.process(image)
+
+    @Deprecated("Keeping it only to support Camera API frame processing")
+    override fun detectInImage(image: InputImage): Task<List<DetectedObject>> = detector.process(image)
 
     @MainThread
     override fun onSuccess(
@@ -204,7 +207,7 @@ class MultiObjectProcessor(
         return distance < objectSelectionDistanceThreshold
     }
 
-    override fun onFailure(e: Exception) {
+    override fun onFailure(e: Exception?)
 {
         Log.e(TAG, "Object detection failed!", e)
     }
 
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ProminentObjectProcessor.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ProminentObjectProcessor.kt
index 288e51bc44..d2d63de774 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ProminentObjectProcessor.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ProminentObjectProcessor.kt
@@ -20,6 +20,7 @@ import android.graphics.RectF
 import android.util.Log
 import androidx.annotation.MainThread
 import com.google.android.gms.tasks.Task
+import com.google.android.odml.image.MlImage
 import com.google.mlkit.md.camera.CameraReticleAnimator
 import com.google.mlkit.md.camera.GraphicOverlay
 import com.google.mlkit.md.R
@@ -85,9 +86,10 @@ class ProminentObjectProcessor(
         }
     }
 
-    override fun detectInImage(image: InputImage): Task<List<DetectedObject>> {
-        return detector.process(image)
-    }
+    override fun detectInImage(image: MlImage): Task<List<DetectedObject>> = detector.process(image)
+
+    @Deprecated("Keeping it only to support Camera API frame processing")
+    override fun detectInImage(image: InputImage): Task<List<DetectedObject>> = detector.process(image)
 
     @MainThread
     override fun onSuccess(
@@ -176,7 +178,7 @@ class ProminentObjectProcessor(
         return reticleRect.intersect(boxRect)
     }
 
-    override fun onFailure(e: Exception) {
+    override fun onFailure(e: Exception?) {
         Log.e(TAG, "Object detection failed!", e)
     }
 
diff --git a/android/material-showcase/app/src/main/res/layout/activity_live_barcode.xml b/android/material-showcase/app/src/main/res/layout/activity_live_barcode.xml
index aed19dc8da..e51b915c91 100644
--- a/android/material-showcase/app/src/main/res/layout/activity_live_barcode.xml
+++ b/android/material-showcase/app/src/main/res/layout/activity_live_barcode.xml
@@ -5,14 +5,14 @@
     android:layout_height="match_parent"
     android:keepScreenOn="true">
 
-    
-    
+
From: Eeshan Jamal
Date: Fri, 14 Feb 2025 21:50:05 +0530
Subject: [PATCH 15/18] Removed the dependency on retrieving preview sizes
 inside the Settings fragment; they are now passed in from the currently used
 camera source when opening it.
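In outline, the hand-off this commit implements looks as follows; a sketch only, relying on CameraSizePair becoming Parcelable via @Parcelize in the diff below:

import android.content.Context
import android.content.Intent

// Sender side: snapshot the sizes supported by the active source and ship them as an extra,
// so the settings screen no longer needs to open the camera itself.
fun openSettings(context: Context, cameraSource: CameraSource?) {
    val intent = Intent(context, SettingsActivity::class.java)
    cameraSource?.let {
        intent.putParcelableArrayListExtra(
            "extra_preview_size_list",
            ArrayList(Utils.generateValidPreviewSizeList(it))
        )
    }
    context.startActivity(intent)
}

// Receiver side: the list round-trips without any camera access in the settings UI.
// val sizes = intent.getParcelableArrayListExtra<CameraSizePair>("extra_preview_size_list")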
--- .../md/CustomModelObjectDetectionActivity.kt | 2 +- .../mlkit/md/LiveBarcodeScanningActivity.kt | 2 +- .../mlkit/md/LiveObjectDetectionActivity.kt | 2 +- .../google/mlkit/md/camera/CameraSizePair.kt | 16 ++--- .../mlkit/md/settings/SettingsActivity.kt | 22 ++++++- .../mlkit/md/settings/SettingsFragment.kt | 60 ++++++------------- 6 files changed, 46 insertions(+), 58 deletions(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt index e29fdd4d8b..b3600d3a47 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt @@ -169,7 +169,7 @@ class CustomModelObjectDetectionActivity : AppCompatActivity(), OnClickListener } R.id.settings_button -> { settingsButton?.isEnabled = false - startActivity(Intent(this, SettingsActivity::class.java)) + startActivity(SettingsActivity.newIntent(this, cameraSource)) } } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt index a3ca2ef3d3..f0e562ceee 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveBarcodeScanningActivity.kt @@ -123,7 +123,7 @@ class LiveBarcodeScanningActivity : AppCompatActivity(), OnClickListener { } R.id.settings_button -> { settingsButton?.isEnabled = false - startActivity(Intent(this, SettingsActivity::class.java)) + startActivity(SettingsActivity.newIntent(this, cameraSource)) } } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt index bd2804e73f..93ab62d9f3 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt @@ -170,7 +170,7 @@ class LiveObjectDetectionActivity : AppCompatActivity(), OnClickListener { } R.id.settings_button -> { settingsButton?.isEnabled = false - startActivity(Intent(this, SettingsActivity::class.java)) + startActivity(SettingsActivity.newIntent(this, cameraSource)) } } } diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt index 492d5f3f0a..21a0d3c4ee 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSizePair.kt @@ -17,7 +17,9 @@ package com.google.mlkit.md.camera import android.hardware.Camera +import android.os.Parcelable import android.util.Size +import kotlinx.android.parcel.Parcelize /** * Stores a preview size and a corresponding same-aspect-ratio picture size. To avoid distorted @@ -25,17 +27,11 @@ import android.util.Size * ratio as the preview size or the preview may end up being distorted. If the picture size is null, * then there is no picture size with the same aspect ratio as the preview size. 
 */
-class CameraSizePair {
-    val preview: Size
-    val picture: Size?
+@Parcelize
+data class CameraSizePair(val preview: Size, val picture: Size?) : Parcelable {
 
-    constructor(previewSize: Camera.Size, pictureSize: Camera.Size?) {
-        preview = Size(previewSize.width, previewSize.height)
-        picture = pictureSize?.let { Size(it.width, it.height) }
+    constructor(previewSize: Camera.Size, pictureSize: Camera.Size?) : this(Size(previewSize.width, previewSize.height),
+        pictureSize?.let { Size(it.width, it.height) }) {
     }
-
-    constructor(previewSize: Size, pictureSize: Size?) {
-        preview = previewSize
-        picture = pictureSize
-    }
 }
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsActivity.kt
index e51629d765..1892380c32 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsActivity.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsActivity.kt
@@ -16,21 +16,28 @@
 
 package com.google.mlkit.md.settings
 
+import android.app.Activity
+import android.content.Context
+import android.content.Intent
 import android.os.Bundle
+import android.util.Log
 import androidx.appcompat.app.AppCompatActivity
 import com.google.mlkit.md.R
+import com.google.mlkit.md.Utils
+import com.google.mlkit.md.camera.CameraSizePair
+import com.google.mlkit.md.camera.CameraSource
 
 /** Hosts the preference fragment to configure settings. */
 class SettingsActivity : AppCompatActivity() {
 
     override fun onCreate(savedInstanceState: Bundle?) {
         super.onCreate(savedInstanceState)
-
         setContentView(R.layout.activity_settings)
+        val previewSizeList = intent.getParcelableArrayListExtra<CameraSizePair>(EXTRA_PREVIEW_SIZE_LIST) ?: arrayListOf()
         supportActionBar?.setDisplayHomeAsUpEnabled(true)
         supportFragmentManager
             .beginTransaction()
-            .replace(R.id.settings_container, SettingsFragment())
+            .replace(R.id.settings_container, SettingsFragment.newInstance(previewSizeList))
             .commit()
     }
 
@@ -38,4 +45,15 @@
         onBackPressed()
         return true
     }
+
+    companion object {
+        private const val EXTRA_PREVIEW_SIZE_LIST = "extra_preview_size_list"
+
+        fun newIntent(context: Context, cameraSource: CameraSource?)
 = Intent(context, SettingsActivity::class.java).apply {
+            cameraSource?.let {
+                putParcelableArrayListExtra(EXTRA_PREVIEW_SIZE_LIST, ArrayList(Utils.generateValidPreviewSizeList(it)))
+            }
+        }
+    }
+
 }
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt
index a93598ddf5..933e5cc9a9 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/settings/SettingsFragment.kt
@@ -16,58 +16,27 @@
 
 package com.google.mlkit.md.settings
 
-import android.content.Context
-import android.hardware.Camera
-import android.hardware.camera2.CameraCharacteristics
-import android.hardware.camera2.CameraManager
 import android.os.Bundle
 import androidx.preference.ListPreference
 import androidx.preference.PreferenceFragmentCompat
-import com.google.mlkit.md.camera.CameraSource
 import com.google.mlkit.md.R
-import com.google.mlkit.md.Utils
-import com.google.mlkit.md.camera.Camera2Source
-import java.io.IOException
-import java.util.HashMap
+import com.google.mlkit.md.camera.CameraSizePair
 
 /** Configures App settings. */
 class SettingsFragment : PreferenceFragmentCompat() {
 
-    /** Detects, characterizes, and connects to a CameraDevice (used for all camera operations) */
-    private val cameraManager: CameraManager by lazy {
-        requireContext().getSystemService(Context.CAMERA_SERVICE) as CameraManager
-    }
-
-    /** [cameraId] corresponding to the provided Camera facing back property */
-    private val cameraId: String by lazy {
-        cameraManager.cameraIdList.forEach {
-            val characteristics = cameraManager.getCameraCharacteristics(it)
-            if (characteristics.get(CameraCharacteristics.LENS_FACING) == Camera2Source.CAMERA_FACING_BACK){
-                return@lazy it
-            }
-        }
-        throw IOException("No Camera found matching the back facing lens ${Camera2Source.CAMERA_FACING_BACK}")
-    }
-
-    /** [CameraCharacteristics] corresponding to the provided Camera ID */
-    private val characteristics: CameraCharacteristics by lazy {
-        cameraManager.getCameraCharacteristics(cameraId)
-    }
-
     override fun onCreatePreferences(bundle: Bundle?, rootKey: String?) {
         setPreferencesFromResource(R.xml.preferences, rootKey)
         setUpRearCameraPreviewSizePreference()
     }
 
     private fun setUpRearCameraPreviewSizePreference() {
-        val previewSizePreference =
-            findPreference<ListPreference>(getString(R.string.pref_key_rear_camera_preview_size))!!
-
-        //var camera: Camera? = null
-
-        try {
-            //camera = Camera.open(CameraSource.CAMERA_FACING_BACK)
-            val previewSizeList = Utils.generateValidPreviewSizeList(characteristics)
+        val previewSizePreference = findPreference<ListPreference>(getString(R.string.pref_key_rear_camera_preview_size))!!
+        val previewSizeList = arguments?.getParcelableArrayList<CameraSizePair>(ARG_PREVIEW_SIZE_LIST) ?: arrayListOf()
+        if (previewSizeList.isEmpty()) {
+            previewSizePreference.parent?.removePreference(previewSizePreference)
+        } else {
             val previewSizeStringValues = arrayOfNulls<String>(previewSizeList.size)
             val previewToPictureSizeStringMap = HashMap<String, String>()
             for (i in previewSizeList.indices) {
@@ -91,11 +60,16 @@ class SettingsFragment : PreferenceFragmentCompat() {
                 )
                 true
             }
-        } catch (e: Exception) {
-            // If there's no camera for the given camera id, hide the corresponding preference.
-            previewSizePreference.parent?.removePreference(previewSizePreference)
-        } finally {
-            //camera?.release()
+        }
+    }
+
+    companion object {
+        private const val ARG_PREVIEW_SIZE_LIST = "arg_preview_size_list"
+
+        fun newInstance(previewSizeList: ArrayList<CameraSizePair>) = SettingsFragment().apply {
+            arguments = Bundle().apply {
+                putParcelableArrayList(ARG_PREVIEW_SIZE_LIST, previewSizeList)
+            }
         }
     }
 }
From 4aadf3c81ce3db881d3217a8e9fc5e4dc160865c Mon Sep 17 00:00:00 2001
From: Eeshan Jamal
Date: Tue, 25 Feb 2025 14:56:30 +0530
Subject: [PATCH 16/18] Started using the Camera2InputInfo class for Camera2
 frame processing, which requires a YUV to NV21 conversion before the frame
 can be used as a bitmap. Introduced the ConfirmedObjectInfo class to fix the
 issue of accessing the bitmap from frame data after the frame has been
 closed; it draws a distinction between a detected object and a confirmed
 object and is now used wherever a confirmed object is required. Moved the
 imageData property from DetectedObjectInfo to ConfirmedObjectInfo, and fixed
 a bitmap variable assignment issue in the DetectedObjectInfo class.

---
 .../md/CustomModelObjectDetectionActivity.kt  |   6 +-
 .../mlkit/md/LiveObjectDetectionActivity.kt   |   6 +-
 .../mlkit/md/StaticObjectDetectionActivity.kt |  11 +-
 .../main/java/com/google/mlkit/md/Utils.kt    | 124 ++++++++++++------
 .../mlkit/md/camera/FrameProcessorBase.kt     |   8 +-
 .../google/mlkit/md/camera/WorkflowModel.kt   |  33 ++---
 .../md/objectdetection/ConfirmedObjectInfo.kt |  60 +++++++++
 .../md/objectdetection/DetectedObjectInfo.kt  |  26 ++--
 .../mlkit/md/productsearch/SearchEngine.kt    |  11 +-
 .../mlkit/md/productsearch/SearchedObject.kt  |   9 +-
 10 files changed, 198 insertions(+), 96 deletions(-)
 create mode 100644 android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ConfirmedObjectInfo.kt

diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt
index b3600d3a47..7d49898936 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/CustomModelObjectDetectionActivity.kt
@@ -276,11 +276,11 @@ class CustomModelObjectDetectionActivity : AppCompatActivity(), OnClickListener
 
         // Observes changes on the object to search, if happens, show detected object labels as
         // product search results.
- objectToSearch.observe(this@CustomModelObjectDetectionActivity, Observer { detectObject ->
- val productList: List<Product> = detectObject.labels.map { label ->
+ objectToSearch.observe(this@CustomModelObjectDetectionActivity, Observer { confirmedObject ->
+ val productList: List<Product> = confirmedObject.labels.map { label ->
 Product("" /* imageUrl */, label.text, "" /* subtitle */)
 }
- workflowModel?.onSearchCompleted(detectObject, productList)
+ workflowModel?.onSearchCompleted(confirmedObject, productList)
 })

 // Observes changes on the object that has search completed, if happens, show the bottom sheet
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt
index 93ab62d9f3..7b6a84ee60 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/LiveObjectDetectionActivity.kt
@@ -276,9 +276,9 @@ class LiveObjectDetectionActivity : AppCompatActivity(), OnClickListener {
 })

 // Observes changes on the object to search, if happens, fire product search request.
- objectToSearch.observe(this@LiveObjectDetectionActivity, Observer { detectObject ->
- searchEngine!!.search(detectObject) { detectedObject, products ->
- workflowModel?.onSearchCompleted(detectedObject, products)
+ objectToSearch.observe(this@LiveObjectDetectionActivity, Observer { confirmObject ->
+ searchEngine!!.search(confirmObject) { confirmedObject, products ->
+ workflowModel?.onSearchCompleted(confirmedObject, products)
 }
 })

diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/StaticObjectDetectionActivity.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/StaticObjectDetectionActivity.kt
index 1d23a7fee6..55c07fa2ca 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/StaticObjectDetectionActivity.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/StaticObjectDetectionActivity.kt
@@ -40,6 +40,7 @@ import androidx.recyclerview.widget.RecyclerView
 import com.google.android.material.bottomsheet.BottomSheetBehavior
 import com.google.android.material.chip.Chip
 import com.google.common.collect.ImmutableList
+import com.google.mlkit.md.objectdetection.ConfirmedObjectInfo
 import com.google.mlkit.md.productsearch.BottomSheetScrimView
 import com.google.mlkit.md.objectdetection.DetectedObjectInfo
 import com.google.mlkit.md.objectdetection.StaticObjectDotView
@@ -249,16 +250,16 @@ class StaticObjectDetectionActivity : AppCompatActivity(), View.OnClickListener
 } else {
 searchedObjectMap.clear()
 for (i in objects.indices) {
- searchEngine?.search(DetectedObjectInfo(objects[i], i, image)) { detectedObject, products ->
- onSearchCompleted(detectedObject, products)
+ searchEngine?.search(ConfirmedObjectInfo.from(DetectedObjectInfo(objects[i], i, image))) { confirmedObject, products ->
+ onSearchCompleted(confirmedObject, products)
 }
 }
 }
 }

- private fun onSearchCompleted(detectedObject: DetectedObjectInfo, productList: List<Product>) {
- Log.d(TAG, "Search completed for object index: ${detectedObject.objectIndex}")
- searchedObjectMap[detectedObject.objectIndex] = SearchedObject(resources, detectedObject, productList)
+ private fun onSearchCompleted(confirmedObject: ConfirmedObjectInfo, productList: List<Product>) {
+ Log.d(TAG, "Search completed for object index: ${confirmedObject.objectIndex}")
+ searchedObjectMap[confirmedObject.objectIndex] = SearchedObject(resources, confirmedObject, productList)
 if (searchedObjectMap.size < detectedObjectNum) {
 // Hold off showing the result until the search of all detected objects completes.
 return
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt
index 1cd831b7dd..49e4f1d0b0 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/Utils.kt
@@ -25,6 +25,7 @@
 import android.content.res.Configuration
 import android.graphics.Bitmap
 import android.graphics.BitmapFactory
 import android.graphics.Canvas
+import android.graphics.ImageFormat
 import android.graphics.Matrix
 import android.graphics.Paint
 import android.graphics.PorterDuff
@@ -46,6 +47,7 @@
 import java.io.IOException
 import java.io.InputStream
 import java.nio.ByteBuffer
 import kotlin.math.abs
+import kotlin.math.min

 /** Utility class to provide helper methods. */
 object Utils {
@@ -149,56 +151,104 @@ object Utils {

 /** Convert NV21 format byte buffer to bitmap. */
 fun convertToBitmap(data: ByteBuffer, width: Int, height: Int, rotationDegrees: Int): Bitmap? {
- data.rewind()
- val imageInBuffer = ByteArray(data.limit())
- data.get(imageInBuffer, 0, imageInBuffer.size)
 try {
- val image = YuvImage(
- imageInBuffer, InputImage.IMAGE_FORMAT_NV21, width, height, null
- )
- val stream = ByteArrayOutputStream()
- image.compressToJpeg(Rect(0, 0, width, height), 80, stream)
- val bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size())
- stream.close()
-
- // Rotate the image back to straight.
- val matrix = Matrix()
- matrix.postRotate(rotationDegrees.toFloat())
- return Bitmap.createBitmap(bmp, 0, 0, bmp.width, bmp.height, matrix, true)
- } catch (e: java.lang.Exception) {
- Log.e(TAG, "Error: " + e.message)
+ data.rewind()
+ val imageInBuffer = ByteArray(data.limit())
+ data.get(imageInBuffer, 0, imageInBuffer.size)
+ return convertToBitmapInternal(imageInBuffer, width, height, rotationDegrees)
+ }
+ catch (e: Exception) {
+ Log.e(TAG, "Error converting ByteBuffer to Bitmap: " + e.message)
 }
 return null
 }

+ /** Convert YUV_420_888 format [Image] to bitmap. */
 fun convertToBitmap(image: Image, rotationDegrees: Int): Bitmap? {
 try {
- val buffer = image.planes[0].buffer
- val bytes = ByteArray(buffer.remaining()).apply { buffer.get(this) }
-
- BitmapFactory.decodeByteArray(bytes, 0, bytes.size)?.let {bitmap ->
- val stream = ByteArrayOutputStream()
- val finalBitmap = if (bitmap.compress(Bitmap.CompressFormat.JPEG, 80, stream)){
- BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size())
- }
- else{
- bitmap
- }
- stream.close()
+ return convertToBitmapInternal(yuv_420_888toNv21(image), image.width, image.height, rotationDegrees)
+ }
+ catch (e: Exception) {
+ Log.e(TAG, "Error converting Image to Bitmap: " + e.message)
+ }
+ return null
+ }

- // Rotate the image back to straight.
- val matrix = Matrix()
- matrix.postRotate(rotationDegrees.toFloat())
- return Bitmap.createBitmap(finalBitmap, 0, 0, finalBitmap.width, finalBitmap.height, matrix, true)
- }
+ private fun convertToBitmapInternal(imageData: ByteArray, width: Int, height: Int, rotationDegrees: Int): Bitmap {
+ val image = YuvImage(
+ imageData, InputImage.IMAGE_FORMAT_NV21, width, height, null
+ )
+ val stream = ByteArrayOutputStream()
+ image.compressToJpeg(Rect(0, 0, width, height), 80, stream)
+ val bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size())
+ stream.close()
+
+ // Rotate the image back to straight.
+ val matrix = Matrix()
+ matrix.postRotate(rotationDegrees.toFloat())
+ return Bitmap.createBitmap(bmp, 0, 0, bmp.width, bmp.height, matrix, true)
+ }
+
+ @Throws(IllegalArgumentException::class)
+ private fun yuv_420_888toNv21(image: Image): ByteArray {
+ require(image.format == ImageFormat.YUV_420_888) {
+ "only support ImageFormat.YUV_420_888 image conversion"
+ }
+ val yPlane = image.planes[0]
+ val uPlane = image.planes[1]
+ val vPlane = image.planes[2]
+
+ val yBuffer = yPlane.buffer
+ val uBuffer = uPlane.buffer
+ val vBuffer = vPlane.buffer
+ yBuffer.rewind()
+ uBuffer.rewind()
+ vBuffer.rewind()
+
+ val ySize = yBuffer.remaining()
+
+ var position = 0
+ // TODO(b/115743986): Pull these bytes from a pool instead of allocating for every image.
+ val nv21 = ByteArray(ySize + (image.width * image.height / 2))
+
+ // Add the full y buffer to the array. If rowStride > width, the padding between rows is skipped.
+ for (row in 0 until image.height) {
+ yBuffer[nv21, position, image.width]
+ position += image.width
+ yBuffer.position(min(ySize, yBuffer.position() - image.width + yPlane.rowStride))
+ }
+
+ val chromaHeight = image.height / 2
+ val chromaWidth = image.width / 2
+ val vRowStride = vPlane.rowStride
+ val uRowStride = uPlane.rowStride
+ val vPixelStride = vPlane.pixelStride
+ val uPixelStride = uPlane.pixelStride
+
+ // Interleave the u and v frames, filling up the rest of the buffer. Use two line buffers to
+ // perform faster bulk gets from the byte buffers.
+ val vLineBuffer = ByteArray(vRowStride)
+ val uLineBuffer = ByteArray(uRowStride)
+ for (row in 0 until chromaHeight) {
+ vBuffer[vLineBuffer, 0, min(vRowStride, vBuffer.remaining())]
+ uBuffer[uLineBuffer, 0, min(uRowStride, uBuffer.remaining())]
+ var vLineBufferPosition = 0
+ var uLineBufferPosition = 0
+ for (col in 0 until chromaWidth) {
+ nv21[position++] = vLineBuffer[vLineBufferPosition]
+ nv21[position++] = uLineBuffer[uLineBufferPosition]
+ vLineBufferPosition += vPixelStride
+ uLineBufferPosition += uPixelStride
+ }
 }
- return null
+
+ return nv21
 }
+
 internal fun openImagePicker(activity: Activity) {
 val intent = Intent(Intent.ACTION_GET_CONTENT)
 intent.addCategory(Intent.CATEGORY_OPENABLE)
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt
index d9f2e208fa..428675d0c8 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/FrameProcessorBase.kt
@@ -24,6 +24,7 @@
 import com.google.android.gms.tasks.Task
 import com.google.android.gms.tasks.TaskExecutors
 import com.google.android.odml.image.MediaImageExtractor
 import com.google.android.odml.image.MlImage
+import com.google.mlkit.md.Camera2InputInfo
 import com.google.mlkit.md.CameraInputInfo
 import com.google.mlkit.md.InputInfo
 import com.google.mlkit.md.ScopedExecutor
@@ -109,12 +110,7 @@ abstract class FrameProcessorBase<T> : FrameProcessor<T> {
 //Log.d(TAG, "Latency is: ${SystemClock.elapsedRealtime() - startMs}")
 MediaImageExtractor.extract(frame).let {
 this@FrameProcessorBase.onSuccess(
- CameraInputInfo(
- it.planes[0].buffer, FrameMetadata(
- frame.width,
- frame.height, frame.rotation
- )
- ), task.result, graphicOverlay
+ Camera2InputInfo(it, frame.rotation), task.result, graphicOverlay
 )
 }
 } else {
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/WorkflowModel.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/WorkflowModel.kt
index c17a1bc4ce..e51607030e 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/WorkflowModel.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/WorkflowModel.kt
@@ -21,6 +21,7 @@
 import android.content.Context
 import androidx.annotation.MainThread
 import androidx.lifecycle.AndroidViewModel
 import androidx.lifecycle.MutableLiveData
+import com.google.mlkit.md.objectdetection.ConfirmedObjectInfo
 import com.google.mlkit.md.objectdetection.DetectedObjectInfo
 import com.google.mlkit.md.productsearch.Product
 import com.google.mlkit.md.productsearch.SearchedObject
@@ -32,7 +33,7 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) {

 val workflowState = MutableLiveData<WorkflowState>()
- val objectToSearch = MutableLiveData<DetectedObjectInfo>()
+ val objectToSearch = MutableLiveData<ConfirmedObjectInfo>()
 val searchedObject = MutableLiveData<SearchedObject>()

 val detectedBarcode = MutableLiveData<Barcode>()
@@ -41,7 +42,7 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) {
 var isCameraLive = false
 private set

- private var confirmedObject: DetectedObjectInfo? = null
+ private var confirmedObject: ConfirmedObjectInfo? = null

 private val context: Context
 get() = getApplication<Application>().applicationContext

@@ -74,12 +75,14 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) {
 fun confirmingObject(confirmingObject: DetectedObjectInfo, progress: Float) {
 val isConfirmed = progress.compareTo(1f) == 0
 if (isConfirmed) {
- confirmedObject = confirmingObject
- if (PreferenceUtils.isAutoSearchEnabled(context)) {
- setWorkflowState(WorkflowState.SEARCHING)
- triggerSearch(confirmingObject)
- } else {
- setWorkflowState(WorkflowState.CONFIRMED)
+ ConfirmedObjectInfo.from(confirmingObject).also {
+ confirmedObject = it
+ if (PreferenceUtils.isAutoSearchEnabled(context)) {
+ setWorkflowState(WorkflowState.SEARCHING)
+ triggerSearch(it)
+ } else {
+ setWorkflowState(WorkflowState.CONFIRMED)
+ }
 }
 } else {
 setWorkflowState(WorkflowState.CONFIRMING)
@@ -94,15 +97,15 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) {
 }
 }

- private fun triggerSearch(detectedObject: DetectedObjectInfo) {
- val objectId = detectedObject.objectId ?: throw NullPointerException()
+ private fun triggerSearch(confirmedObject: ConfirmedObjectInfo) {
+ val objectId = confirmedObject.objectId ?: throw NullPointerException()
 if (objectIdsToSearch.contains(objectId)) {
 // Already in searching.
 return
 }
 objectIdsToSearch.add(objectId)
- objectToSearch.value = detectedObject
+ objectToSearch.value = confirmedObject
 }

 fun markCameraLive() {
@@ -114,14 +117,14 @@ class WorkflowModel(application: Application) : AndroidViewModel(application) {
 isCameraLive = false
 }

- fun onSearchCompleted(detectedObject: DetectedObjectInfo, products: List<Product>) {
- val lConfirmedObject = confirmedObject
- if (detectedObject != lConfirmedObject) {
+ fun onSearchCompleted(confirmedObject: ConfirmedObjectInfo, products: List<Product>) {
+ val lConfirmedObject = this@WorkflowModel.confirmedObject
+ if (confirmedObject != lConfirmedObject) {
 // Drops the search result from the object that has lost focus.
 return
 }

- objectIdsToSearch.remove(detectedObject.objectId)
+ objectIdsToSearch.remove(confirmedObject.objectId)
 setWorkflowState(WorkflowState.SEARCHED)
 searchedObject.value = SearchedObject(context.resources, lConfirmedObject, products)

diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ConfirmedObjectInfo.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ConfirmedObjectInfo.kt
new file mode 100644
index 0000000000..a98dd15876
--- /dev/null
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/ConfirmedObjectInfo.kt
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.md.objectdetection
+
+import android.graphics.Bitmap
+import android.graphics.Bitmap.CompressFormat
+import android.graphics.Rect
+import android.util.Log
+import com.google.mlkit.vision.objects.DetectedObject
+import java.io.ByteArrayOutputStream
+import java.io.IOException
+
+/**
+ * Holds the confirmed object info and its related image info.
+ */
+
+class ConfirmedObjectInfo private constructor(val objectId: Int?, val objectIndex: Int, val boundingBox: Rect,
+ val labels: List<DetectedObject.Label>, val bitmap: Bitmap) {
+
+ private var jpegBytes: ByteArray? = null
+
+ val imageData: ByteArray?
+ @Synchronized get() {
+ if (jpegBytes == null) {
+ try {
+ ByteArrayOutputStream().use { stream ->
+ bitmap.compress(CompressFormat.JPEG, /* quality= */ 100, stream)
+ jpegBytes = stream.toByteArray()
+ }
+ } catch (e: IOException) {
+ Log.e(TAG, "Error getting object image data!")
+ }
+ }
+ return jpegBytes
+ }
+
+ companion object {
+ private const val TAG = "ConfirmedObject"
+
+ fun from(detectedObjectInfo: DetectedObjectInfo): ConfirmedObjectInfo {
+ return ConfirmedObjectInfo(detectedObjectInfo.objectId, detectedObjectInfo.objectIndex,
+ detectedObjectInfo.boundingBox, detectedObjectInfo.labels, detectedObjectInfo.getBitmap())
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/DetectedObjectInfo.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/DetectedObjectInfo.kt
index 6ff02ba85a..c15fa28a6f 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/DetectedObjectInfo.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/objectdetection/DetectedObjectInfo.kt
@@ -35,26 +35,12 @@ class DetectedObjectInfo(
 ) {

 private var bitmap: Bitmap? = null
- private var jpegBytes: ByteArray? = null

 val objectId: Int? = detectedObject.trackingId
 val boundingBox: Rect = detectedObject.boundingBox
 val labels: List<DetectedObject.Label> = detectedObject.labels

- val imageData: ByteArray?
- @Synchronized get() {
- if (jpegBytes == null) {
- try {
- ByteArrayOutputStream().use { stream ->
- getBitmap().compress(CompressFormat.JPEG, /* quality= */ 100, stream)
- jpegBytes = stream.toByteArray()
- }
- } catch (e: IOException) {
- Log.e(TAG, "Error getting object image data!")
- }
- }
- return jpegBytes
- }
+
 @Synchronized
 fun getBitmap(): Bitmap {
@@ -67,11 +53,15 @@ class DetectedObjectInfo(
 boundingBox.width(),
 boundingBox.height()
 )
- if (createdBitmap.width > MAX_IMAGE_WIDTH) {
+ (if (createdBitmap.width > MAX_IMAGE_WIDTH) {
 val dstHeight = (MAX_IMAGE_WIDTH.toFloat() / createdBitmap.width * createdBitmap.height).toInt()
- bitmap = Bitmap.createScaledBitmap(createdBitmap, MAX_IMAGE_WIDTH, dstHeight, /* filter= */ false)
+ Bitmap.createScaledBitmap(createdBitmap, MAX_IMAGE_WIDTH, dstHeight, /* filter= */ false)
+ }
+ else {
+ createdBitmap
+ }).also {
+ bitmap = it
 }
- createdBitmap
 }
 }
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchEngine.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchEngine.kt
index d9ba592cd3..dec25b172e 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchEngine.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchEngine.kt
@@ -22,6 +22,7 @@
 import com.android.volley.RequestQueue
 import com.android.volley.toolbox.JsonObjectRequest
 import com.android.volley.toolbox.Volley
 import com.google.android.gms.tasks.Tasks
+import com.google.mlkit.md.objectdetection.ConfirmedObjectInfo
 import com.google.mlkit.md.objectdetection.DetectedObjectInfo
 import java.util.ArrayList
 import java.util.concurrent.Callable
@@ -35,11 +36,11 @@ class SearchEngine(context: Context) {

 private val requestCreationExecutor: ExecutorService = Executors.newSingleThreadExecutor()

 fun search(
- detectedObject: DetectedObjectInfo,
- listener: (detectedObject: DetectedObjectInfo, productList: List<Product>) -> Unit
+ confirmedObject: ConfirmedObjectInfo,
+ listener: (confirmedObject: ConfirmedObjectInfo, productList: List<Product>) -> Unit
 ) {
 // Cropping the object image out of the full image is expensive, so do it off the UI thread.
- Tasks.call(requestCreationExecutor, Callable { createRequest(detectedObject) })
+ Tasks.call(requestCreationExecutor, Callable { createRequest(confirmedObject) })
 .addOnSuccessListener { productRequest -> searchRequestQueue.add(productRequest.setTag(TAG)) }
 .addOnFailureListener { e ->
 Log.e(TAG, "Failed to create product search request!", e)
@@ -50,7 +51,7 @@
 Product(/* imageUrl= */"", "Product title $i", "Product subtitle $i")
 )
 }
- listener.invoke(detectedObject, productList)
+ listener.invoke(confirmedObject, productList)
 }
 }

@@ -63,7 +64,7 @@
 private const val TAG = "SearchEngine"

 @Throws(Exception::class)
- private fun createRequest(searchingObject: DetectedObjectInfo): JsonObjectRequest {
+ private fun createRequest(searchingObject: ConfirmedObjectInfo): JsonObjectRequest {
 val objectImageData = searchingObject.imageData
 ?: throw Exception("Failed to get object image data!")
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchedObject.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchedObject.kt
index 201746025d..2191619ffb 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchedObject.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/productsearch/SearchedObject.kt
@@ -21,12 +21,13 @@
 import android.graphics.Bitmap
 import android.graphics.Rect
 import com.google.mlkit.md.R
 import com.google.mlkit.md.Utils
+import com.google.mlkit.md.objectdetection.ConfirmedObjectInfo
 import com.google.mlkit.md.objectdetection.DetectedObjectInfo

 /** Hosts the detected object info and its search result. */
 class SearchedObject(
 resources: Resources,
- private val detectedObject: DetectedObjectInfo,
+ private val confirmedObject: ConfirmedObjectInfo,
 val productList: List<Product>
 ) {

@@ -34,14 +35,14 @@
 private var objectThumbnail: Bitmap? = null

 val objectIndex: Int
- get() = detectedObject.objectIndex
+ get() = confirmedObject.objectIndex

 val boundingBox: Rect
- get() = detectedObject.boundingBox
+ get() = confirmedObject.boundingBox

 @Synchronized fun getObjectThumbnail(): Bitmap = objectThumbnail ?: let {
- Utils.getCornerRoundedBitmap(detectedObject.getBitmap(), objectThumbnailCornerRadius)
+ Utils.getCornerRoundedBitmap(confirmedObject.bitmap, objectThumbnailCornerRadius)
 .also { objectThumbnail = it }
 }
 }

From 0ba6451cfd1d96b67c6558c19e2f29d208939848 Mon Sep 17 00:00:00 2001
From: Eeshan Jamal
Date: Tue, 25 Feb 2025 16:19:55 +0530
Subject: [PATCH 17/18] - Removed the pictureSize property from CameraSource,
 along with the preference for it over previewSize in Camera2APISource and
 GraphicOverlay, to fix an issue where the ML Kit ObjectDetector failed to
 process frames (likely due to the higher picture resolution).
 - Reverted the applicationId change that was made for debugging purposes.
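
For reference, the size-selection idea behind this change can be sketched as
follows. This is a minimal illustration only, not code from the patch: the
pickDetectorFriendlySize() helper and the 1080p area cap are assumptions
chosen for the example.

    import android.hardware.camera2.CameraCharacteristics
    import android.util.Size
    import android.view.SurfaceHolder

    // Pick one stream size that the preview surface and the detector's
    // ImageReader can share, capping the pixel count so the detector is
    // never fed oversized still-capture frames (e.g. 3024 x 3024).
    fun pickDetectorFriendlySize(
        characteristics: CameraCharacteristics,
        maxArea: Int = 1920 * 1080 // assumed cap, chosen for the example
    ): Size? {
        val map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP) ?: return null
        return map.getOutputSizes(SurfaceHolder::class.java)
            .filter { it.width * it.height <= maxArea }
            .maxByOrNull { it.width * it.height }
    }

With a single moderate size feeding both surfaces, GraphicOverlay can scale
its coordinates from getSelectedPreviewSize() alone, which is what the diff
below does.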
--- android/material-showcase/app/build.gradle | 2 +- .../google/mlkit/md/camera/Camera2APISource.kt | 16 ++++------------ .../google/mlkit/md/camera/CameraAPISource.kt | 2 -- .../com/google/mlkit/md/camera/CameraSource.kt | 5 ----- .../mlkit/md/camera/CameraSourceFactory.kt | 2 +- .../com/google/mlkit/md/camera/GraphicOverlay.kt | 7 +------ 6 files changed, 7 insertions(+), 27 deletions(-) diff --git a/android/material-showcase/app/build.gradle b/android/material-showcase/app/build.gradle index 78210f29c2..1b56c0fb23 100644 --- a/android/material-showcase/app/build.gradle +++ b/android/material-showcase/app/build.gradle @@ -5,7 +5,7 @@ apply plugin: 'kotlin-android-extensions' android { compileSdkVersion 31 defaultConfig { - applicationId "com.google.mlkit.mdn" + applicationId "com.google.mlkit.md" minSdkVersion 21 targetSdkVersion 31 versionCode 1 diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt index 9fde18e60c..9d67a24c3d 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/Camera2APISource.kt @@ -114,9 +114,6 @@ class Camera2APISource(private val graphicOverlay: GraphicOverlay): CameraSource /** [Size] that is currently in use by the [camera] */ private var previewSize: Size? = null - /** [Size] that is use by the [imageReader] as a preference over [previewSize] If it exists */ - private var pictureSize: Size? = null - /** [Thread] for detecting & processing [imageReader] frames */ private var processingThread: Thread? = null @@ -312,21 +309,16 @@ class Camera2APISource(private val graphicOverlay: GraphicOverlay): CameraSource override fun getSelectedPreviewSize() = previewSize - override fun getSelectedPictureSize() = pictureSize - override fun start(surfaceHolder: SurfaceHolder) { runBlocking { mutex.withLock { if (camera != null) return@withLock - camera = createCamera().also {cameraDevice -> - getPreviewAndPictureSize(this@Camera2APISource).also { sizePair -> - previewSize = sizePair.preview - pictureSize = sizePair.picture - val imageSize = sizePair.picture ?: sizePair.preview - imageReader = ImageReader.newInstance(imageSize.width, imageSize.height, IMAGE_FORMAT, IMAGE_BUFFER_SIZE).also { imageReader -> - session = createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), cameraHandler).also {cameraCaptureSession -> + camera = createCamera().also { cameraDevice -> + getPreviewAndPictureSize(this@Camera2APISource).preview.let { previewSize -> + imageReader = ImageReader.newInstance(previewSize.width, previewSize.height, IMAGE_FORMAT, IMAGE_BUFFER_SIZE).also { imageReader -> + session = createCaptureSession(cameraDevice, listOf(surfaceHolder.surface, imageReader.surface), cameraHandler).also { cameraCaptureSession -> captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply { addTarget(surfaceHolder.surface) addTarget(imageReader.surface) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt index a29d1c28c8..518e54025b 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt +++ 
b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraAPISource.kt
@@ -250,8 +250,6 @@ class CameraAPISource(private val graphicOverlay: GraphicOverlay) : CameraSource

 override fun getSelectedPreviewSize(): Size? = previewSize

- override fun getSelectedPictureSize(): Size? = null
-
 @Synchronized
 override fun start(surfaceHolder: SurfaceHolder) {
 if (camera != null) return
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt
index c2a83348df..a427639919 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSource.kt
@@ -46,11 +46,6 @@ abstract class CameraSource {
 */
 internal abstract fun getSelectedPreviewSize(): Size?

- /**
- * Returns the selected picture [Size] by the Camera
- */
- internal abstract fun getSelectedPictureSize(): Size?
-
 /**
 * Opens the camera and starts sending preview frames to the underlying detector. The supplied
 * surface holder is used for the preview so frames can be displayed to the user.
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt
index 57bb69392e..c5d7fc2b9b 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt
@@ -32,7 +32,7 @@ object CameraSourceFactory {
 CameraAPISource(graphicOverlay)
 } else {
 Log.d(TAG, "Camera2 API source used")
- Camera2APISource(graphicOverlay)
+ CameraAPISource(graphicOverlay)
 }
 }
diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt
index c3669fb3c8..d841545788 100644
--- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt
+++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/GraphicOverlay.kt
@@ -80,12 +80,7 @@ class GraphicOverlay(context: Context, attrs: AttributeSet) : View(context, attr
 * coordinates later.
 */
 fun setCameraInfo(cameraSource: CameraSource) {
- //Adding picture size and also as a preferred way because now with the Camera 2 API we have to
- //always define the size for the preview frames where we always have to give preference to
- //picture size (if it exists) as compare to preview size. This change is to fix barcode detection issue
- //in-cases where picture size is higher than preview size(e.g. preview size: 1088 x 1088 & picture
- // size: 3024 x 3024).
- val previewSize = cameraSource.getSelectedPictureSize() ?: cameraSource.getSelectedPreviewSize() ?: return
+ val previewSize = cameraSource.getSelectedPreviewSize() ?: return
 if (Utils.isPortraitMode(context)) {
 // Swap width and height when in portrait, since camera's natural orientation is landscape.
 previewWidth = previewSize.height

From 459adcf7ede6f6041abfc6b295739a3c5adb44a2 Mon Sep 17 00:00:00 2001
From: Eeshan Jamal
Date: Tue, 25 Feb 2025 17:56:28 +0530
Subject: [PATCH 18/18] Revert the change that forced CameraAPISource on newer
 devices for testing.
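
In effect this restores capability-based selection in CameraSourceFactory. A
minimal sketch of that selection follows; the factory's real condition is not
shown in these hunks, so the API-level check below is an illustrative
assumption, as is the createCameraSource() wrapper name.

    import android.os.Build

    // Fall back to the legacy CameraAPISource only where Camera2 is
    // unavailable; otherwise build the Camera2-backed source. The
    // Build.VERSION test stands in for whatever condition the factory
    // actually evaluates.
    fun createCameraSource(graphicOverlay: GraphicOverlay): CameraSource =
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
            CameraAPISource(graphicOverlay)
        } else {
            Camera2APISource(graphicOverlay)
        }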
--- .../main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt index c5d7fc2b9b..57bb69392e 100644 --- a/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt +++ b/android/material-showcase/app/src/main/java/com/google/mlkit/md/camera/CameraSourceFactory.kt @@ -32,7 +32,7 @@ object CameraSourceFactory { CameraAPISource(graphicOverlay) } else { Log.d(TAG, "Camera2 API source used") - CameraAPISource(graphicOverlay) + Camera2APISource(graphicOverlay) } }
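
Taken together, these patches revolve around one Camera2 lifetime rule: an
Image handed out by an ImageReader is only valid until close(), so any pixels
needed later must be copied out while the frame is still open. A minimal
sketch of that pattern using the Camera2InputInfo class added in patch 16
follows; the onFrame() callback and the fixed 90-degree rotation are
assumptions made for the example, not code from the patches.

    import android.graphics.Bitmap
    import android.media.ImageReader

    fun onFrame(reader: ImageReader) {
        val image = reader.acquireLatestImage() ?: return
        try {
            // Materialize a Bitmap now, while the Image planes are still valid.
            val bitmap: Bitmap = Camera2InputInfo(image, /* frameRotation= */ 90).getBitmap()
            // Hand the Bitmap, never the Image, to anything that outlives the
            // frame, e.g. ConfirmedObjectInfo.from(...) as introduced in patch 16.
        } finally {
            image.close() // after this, reading the Image planes is invalid
        }
    }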