From 3e4245768dc3cd4490944e5a3c94a528302ffb46 Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Fri, 2 Aug 2024 19:38:27 +0800
Subject: [PATCH 1/9] Merge actual implementation in
 https://github.com/google/ExoPlayer/pull/7132.

---
 libraries/decoder_ffmpeg/README.md            |  34 +-
 libraries/decoder_ffmpeg/build.gradle         |   3 +-
 .../ExperimentalFfmpegVideoDecoder.java       | 256 ++++++++
 .../ExperimentalFfmpegVideoRenderer.java      |  96 ++-
 .../media3/decoder/ffmpeg/FfmpegLibrary.java  |   1 +
 .../src/main/jni/BlockingQueue.h              | 132 +++++
 .../src/main/jni/CMakeLists.txt               |  28 +-
 .../src/main/jni/build_ffmpeg.sh              |   2 +-
 .../decoder_ffmpeg/src/main/jni/build_yuv.sh  |  65 +++
 .../decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc | 545 ++++++++++++++++++
 10 files changed, 1136 insertions(+), 26 deletions(-)
 create mode 100644 libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoDecoder.java
 create mode 100644 libraries/decoder_ffmpeg/src/main/jni/BlockingQueue.h
 create mode 100755 libraries/decoder_ffmpeg/src/main/jni/build_yuv.sh

diff --git a/libraries/decoder_ffmpeg/README.md b/libraries/decoder_ffmpeg/README.md
index 1d39442f0a9..931bdda7293 100644
--- a/libraries/decoder_ffmpeg/README.md
+++ b/libraries/decoder_ffmpeg/README.md
@@ -1,7 +1,7 @@
 # FFmpeg decoder module
 
-The FFmpeg module provides `FfmpegAudioRenderer`, which uses FFmpeg for decoding
-and can render audio encoded in a variety of formats.
+The FFmpeg module provides `FfmpegAudioRenderer` and `ExperimentalFfmpegVideoRenderer`,
+which use FFmpeg for decoding and can render audio and video encoded in a variety of
+formats.
 
 ## License note
 
@@ -65,7 +65,7 @@ FFMPEG_PATH="$(pwd)"
   details of the available decoders, and which formats they support.
 
 ```
-ENABLED_DECODERS=(vorbis opus flac)
+ENABLED_DECODERS=(vorbis opus flac h264 hevc)
 ```
 
 *   Add a link to the FFmpeg source code in the FFmpeg module `jni` directory.
@@ -85,6 +85,34 @@ cd "${FFMPEG_MODULE_PATH}/jni" && \
   "${FFMPEG_MODULE_PATH}" "${NDK_PATH}" "${HOST_PLATFORM}" "${ANDROID_ABI}" "${ENABLED_DECODERS[@]}"
 ```
 
+
+The experimental video decoder converts decoded frames to `AV_PIX_FMT_YUV420P`, rotates them
+with `libyuv`, and copies the pixels into an `ANativeWindow` buffer, so a `libyuv` checkout is
+also required:
+
+*   Fetch `libyuv`:
+
+```
+cd "<preferred location for libyuv>" && \
+git clone https://chromium.googlesource.com/libyuv/libyuv && \
+YUV_PATH="$(pwd)"
+```
+
+*   Add a link to the `libyuv` source code in the FFmpeg module `jni` directory.
+
+```
+cd "${FFMPEG_MODULE_PATH}/jni" && \
+ln -s "$YUV_PATH" libyuv
+```
+
+*   Execute `build_yuv.sh` to build `libyuv` for `armeabi-v7a`, `arm64-v8a`, `x86` and
+    `x86_64`. The script can be edited if you need to build for different architectures:
+
+```
+cd "${FFMPEG_MODULE_PATH}/jni" && \
+./build_yuv.sh \
+  "${FFMPEG_MODULE_PATH}" "${NDK_PATH}" "${ANDROID_ABI}"
+```
+
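+Once the FFmpeg and `libyuv` libraries are built, the experimental video renderer can be
+injected into a player through a custom `RenderersFactory`. The sketch below is illustrative
+(the `FfmpegRenderersFactory` class name and the dropped-frame threshold are not part of the
+module); it appends the renderer after the platform renderers so that it acts as a software
+fallback:
+
+```
+package com.example.video;  // Hypothetical application package.
+
+import android.content.Context;
+import android.os.Handler;
+import androidx.media3.common.util.UnstableApi;
+import androidx.media3.decoder.ffmpeg.ExperimentalFfmpegVideoRenderer;
+import androidx.media3.exoplayer.DefaultRenderersFactory;
+import androidx.media3.exoplayer.Renderer;
+import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
+import androidx.media3.exoplayer.video.VideoRendererEventListener;
+import java.util.ArrayList;
+
+@UnstableApi
+public final class FfmpegRenderersFactory extends DefaultRenderersFactory {
+
+  public FfmpegRenderersFactory(Context context) {
+    super(context);
+  }
+
+  @Override
+  protected void buildVideoRenderers(
+      Context context,
+      @ExtensionRendererMode int extensionRendererMode,
+      MediaCodecSelector mediaCodecSelector,
+      boolean enableDecoderFallback,
+      Handler eventHandler,
+      VideoRendererEventListener eventListener,
+      long allowedVideoJoiningTimeMs,
+      ArrayList<Renderer> out) {
+    super.buildVideoRenderers(
+        context,
+        extensionRendererMode,
+        mediaCodecSelector,
+        enableDecoderFallback,
+        eventHandler,
+        eventListener,
+        allowedVideoJoiningTimeMs,
+        out);
+    // Append the FFmpeg software video renderer after the platform renderers, so it is used
+    // as a fallback when no platform decoder handles the format.
+    out.add(
+        new ExperimentalFfmpegVideoRenderer(
+            allowedVideoJoiningTimeMs,
+            eventHandler,
+            eventListener,
+            /* maxDroppedFramesToNotify= */ 50));
+  }
+}
+```
+
+Pass the factory to `ExoPlayer.Builder(context, renderersFactory)` when building the player.
+Recent versions of `DefaultRenderersFactory` may already load `ExperimentalFfmpegVideoRenderer`
+via reflection when an extension renderer mode is enabled, in which case the override above is
+not needed.
+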
 ## Build instructions (Windows)
 
 We do not provide support for building this module on Windows, however it should
diff --git a/libraries/decoder_ffmpeg/build.gradle b/libraries/decoder_ffmpeg/build.gradle
index 3c111c9e140..1a31f2564ce 100644
--- a/libraries/decoder_ffmpeg/build.gradle
+++ b/libraries/decoder_ffmpeg/build.gradle
@@ -17,7 +17,7 @@ android.namespace = 'androidx.media3.decoder.ffmpeg'
 
 // Configure the native build only if ffmpeg is present to avoid gradle sync
 // failures if ffmpeg hasn't been built according to the README instructions.
-if (project.file('src/main/jni/ffmpeg').exists()) {
+if (project.file('src/main/jni/ffmpeg').exists() && project.file('src/main/jni/libyuv').exists()) {
     android.externalNativeBuild.cmake.path = 'src/main/jni/CMakeLists.txt'
     // Should match cmake_minimum_required.
     android.externalNativeBuild.cmake.version = '3.21.0+'
@@ -28,6 +28,7 @@ dependencies {
     // TODO(b/203752526): Remove this dependency.
     implementation project(modulePrefix + 'lib-exoplayer')
     implementation 'androidx.annotation:annotation:' + androidxAnnotationVersion
+    implementation project(modulePrefix + 'lib-common')
     compileOnly 'org.checkerframework:checker-qual:' + checkerframeworkVersion
     compileOnly 'org.jetbrains.kotlin:kotlin-annotations-jvm:' + kotlinAnnotationsVersion
     testImplementation project(modulePrefix + 'test-utils')
diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoDecoder.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoDecoder.java
new file mode 100644
index 00000000000..3707b56d08d
--- /dev/null
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoDecoder.java
@@ -0,0 +1,256 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.decoder.ffmpeg;
+
+import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE;
+
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+import androidx.media3.common.C;
+import androidx.media3.common.Format;
+import androidx.media3.common.util.Assertions;
+import androidx.media3.common.util.UnstableApi;
+import androidx.media3.common.util.Util;
+import androidx.media3.decoder.DecoderInputBuffer;
+import androidx.media3.decoder.SimpleDecoder;
+import androidx.media3.decoder.VideoDecoderOutputBuffer;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+/**
+ * Experimental FFmpeg video decoder.
+ */
+@VisibleForTesting(otherwise = PACKAGE_PRIVATE)
+@UnstableApi
+/* package */ final class ExperimentalFfmpegVideoDecoder
+    extends SimpleDecoder<DecoderInputBuffer, VideoDecoderOutputBuffer, FfmpegDecoderException> {
+
+  private static final String TAG = "FfmpegVideoDecoder";
+
+  // LINT.IfChange
+  private static final int VIDEO_DECODER_SUCCESS = 0;
+  private static final int VIDEO_DECODER_ERROR_INVALID_DATA = -1;
+  private static final int VIDEO_DECODER_ERROR_OTHER = -2;
+  private static final int VIDEO_DECODER_ERROR_READ_FRAME = -3;
+  // LINT.ThenChange(../../../../../../../jni/ffmpeg_jni.cc)
+
+  private final String codecName;
+  private long nativeContext;
+  @Nullable
+  private final byte[] extraData;
+  @C.VideoOutputMode
+  private volatile int outputMode;
+
+  private int degree = 0;
+
+  /**
+   * Creates an FFmpeg video decoder.
+   *
+   * @param numInputBuffers        Number of input buffers.
+   * @param numOutputBuffers       Number of output buffers.
+   * @param initialInputBufferSize The initial size of each input buffer, in bytes.
+   * @param threads                Number of threads FFmpeg will use to decode.
+   * @param format                 The format of the input media, used to obtain codec-specific
+   *                               initialization data and the rotation to apply.
+   * @throws FfmpegDecoderException Thrown if an exception occurs when initializing the decoder.
+   */
+  public ExperimentalFfmpegVideoDecoder(
+      int numInputBuffers, int numOutputBuffers, int initialInputBufferSize, int threads,
+      Format format)
+      throws FfmpegDecoderException {
+    super(
+        new DecoderInputBuffer[numInputBuffers],
+        new VideoDecoderOutputBuffer[numOutputBuffers]);
+    if (!FfmpegLibrary.isAvailable()) {
+      throw new FfmpegDecoderException("Failed to load decoder native library.");
+    }
+    codecName = Assertions.checkNotNull(FfmpegLibrary.getCodecName(format.sampleMimeType));
+    extraData = getExtraData(format.sampleMimeType, format.initializationData);
+    degree = format.rotationDegrees;
+    nativeContext = ffmpegInitialize(codecName, extraData, threads, degree);
+    if (nativeContext == 0) {
+      throw new FfmpegDecoderException("Failed to initialize decoder.");
+    }
+    setInitialInputBufferSize(initialInputBufferSize);
+  }
+
+  /**
+   * Returns FFmpeg-compatible codec-specific initialization data ("extra data"), or {@code null} if
+   * not required.
+   */
+  @Nullable
+  private static byte[] getExtraData(String mimeType, List<byte[]> initializationData) {
+    int size = 0;
+    for (int i = 0; i < initializationData.size(); i++) {
+      size += initializationData.get(i).length;
+    }
+    if (size > 0) {
+      byte[] extra = new byte[size];
+      ByteBuffer wrapper = ByteBuffer.wrap(extra);
+      for (int i = 0; i < initializationData.size(); i++) {
+        wrapper.put(initializationData.get(i));
+      }
+      return extra;
+    }
+    return null;
+  }
+
+  @Override
+  public String getName() {
+    return "ffmpeg" + FfmpegLibrary.getVersion() + "-" + codecName;
+  }
+
+  /**
+   * Sets the output mode for frames rendered by the decoder.
+   *
+   * @param outputMode The output mode.
+   */
+  public void setOutputMode(@C.VideoOutputMode int outputMode) {
+    this.outputMode = outputMode;
+  }
+
+  @Override
+  protected DecoderInputBuffer createInputBuffer() {
+    return new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT);
+  }
+
+  @Override
+  protected VideoDecoderOutputBuffer createOutputBuffer() {
+    return new VideoDecoderOutputBuffer(this::releaseOutputBuffer);
+  }
+
+  @Override
+  @Nullable
+  protected FfmpegDecoderException decode(
+      DecoderInputBuffer inputBuffer, VideoDecoderOutputBuffer outputBuffer, boolean reset) {
+    if (reset) {
+      nativeContext = ffmpegReset(nativeContext);
+      if (nativeContext == 0) {
+        return new FfmpegDecoderException("Error resetting (see logcat).");
+      }
+    }
+
+    // Send the packet of encoded input data to the decoder.
+    ByteBuffer inputData = Util.castNonNull(inputBuffer.data);
+    int inputSize = inputData.limit();
+    int sendPacketResult = ffmpegSendPacket(nativeContext, inputData, inputSize,
+        inputBuffer.timeUs);
+
+    if (sendPacketResult == VIDEO_DECODER_ERROR_INVALID_DATA) {
+      outputBuffer.shouldBeSkipped = true;
+      return null;
+    } else if (sendPacketResult == VIDEO_DECODER_ERROR_READ_FRAME) {
+      // The decoder cannot accept more input until a frame is received, so fall through and
+      // receive one below.
+    } else if (sendPacketResult == VIDEO_DECODER_ERROR_OTHER) {
+      return new FfmpegDecoderException("ffmpegDecode error: (see logcat)");
+    }
+
+    // receive frame
+    boolean decodeOnly = !isAtLeastOutputStartTimeUs(inputBuffer.timeUs);
+    // We need to dequeue the decoded frame from the decoder even when the input data is
+    // decode-only.
+    if (!decodeOnly) {
+      outputBuffer.init(inputBuffer.timeUs, outputMode, null);
+    }
+    int getFrameResult = ffmpegReceiveFrame(nativeContext, outputMode, outputBuffer, decodeOnly);
+    if (getFrameResult == VIDEO_DECODER_ERROR_OTHER) {
+      return new FfmpegDecoderException("ffmpegDecode error: (see logcat)");
+    }
+
+    if (getFrameResult == VIDEO_DECODER_ERROR_INVALID_DATA) {
+      outputBuffer.shouldBeSkipped = true;
+    }
+
+    if (!decodeOnly) {
+      outputBuffer.format = inputBuffer.format;
+    }
+
+    return null;
+  }
+
+  @Override
+  protected FfmpegDecoderException createUnexpectedDecodeException(Throwable error) {
+    return new FfmpegDecoderException("Unexpected decode error", error);
+  }
+
+  @Override
+  public void release() {
+    super.release();
+    ffmpegRelease(nativeContext);
+    nativeContext = 0;
+  }
+
+  /**
+   * Renders output buffer to the given surface. Must only be called when in {@link
+   * C#VIDEO_OUTPUT_MODE_SURFACE_YUV} mode.
+   *
+   * @param outputBuffer Output buffer.
+   * @param surface      Output surface.
+   * @throws FfmpegDecoderException Thrown if called with invalid output mode or frame rendering
+   *                                fails.
+   */
+  public void renderToSurface(VideoDecoderOutputBuffer outputBuffer, Surface surface)
+      throws FfmpegDecoderException {
+    if (outputBuffer.mode != C.VIDEO_OUTPUT_MODE_SURFACE_YUV) {
+      throw new FfmpegDecoderException("Invalid output mode.");
+    }
+    int result = ffmpegRenderFrame(
+        nativeContext, surface, outputBuffer, outputBuffer.width, outputBuffer.height);
+    if (result == VIDEO_DECODER_ERROR_OTHER) {
+      throw new FfmpegDecoderException("Buffer render error (see logcat).");
+    }
+  }
+
+  private native long ffmpegInitialize(String codecName, @Nullable byte[] extraData, int threads,
+      int degree);
+
+  private native long ffmpegReset(long context);
+
+  private native void ffmpegRelease(long context);
+
+  private native int ffmpegRenderFrame(
+      long context, Surface surface, VideoDecoderOutputBuffer outputBuffer,
+      int displayedWidth,
+      int displayedHeight);
+
+  /**
+   * Sends the encoded data to the decoder.
+   *
+   * @param context     Decoder context.
+   * @param encodedData Encoded data.
+   * @param length      Length of the data buffer.
+   * @param inputTime   Presentation timestamp of the input buffer, in microseconds.
+   * @return {@link #VIDEO_DECODER_SUCCESS} if successful, {@link #VIDEO_DECODER_ERROR_READ_FRAME}
+   * if a frame must be received before more input can be sent, {@link
+   * #VIDEO_DECODER_ERROR_INVALID_DATA} if the input data was invalid, or {@link
+   * #VIDEO_DECODER_ERROR_OTHER} if an error occurred.
+   */
+  private native int ffmpegSendPacket(long context, ByteBuffer encodedData, int length,
+      long inputTime);
+
+  /**
+   * Gets the decoded frame.
+   *
+   * @param context      Decoder context.
+   * @param outputMode   The output mode, one of the {@code C.VIDEO_OUTPUT_MODE_*} constants.
+   * @param outputBuffer Output buffer for the decoded frame.
+   * @param decodeOnly   Whether the frame should be decoded but not output.
+   * @return {@link #VIDEO_DECODER_SUCCESS} if successful, {@link #VIDEO_DECODER_ERROR_INVALID_DATA}
+   * if no displayable frame is available or the frame is decode-only, or {@link
+   * #VIDEO_DECODER_ERROR_OTHER} if an error occurred.
+   */
+  private native int ffmpegReceiveFrame(
+      long context, int outputMode, VideoDecoderOutputBuffer outputBuffer, boolean decodeOnly);
+
+}
diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
index e9b765906b3..b2224a265ee 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
@@ -18,12 +18,15 @@
 import static androidx.media3.exoplayer.DecoderReuseEvaluation.DISCARD_REASON_MIME_TYPE_CHANGED;
 import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_NO;
 import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_YES_WITHOUT_RECONFIGURATION;
+import static java.lang.Runtime.getRuntime;
 
 import android.os.Handler;
 import android.view.Surface;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
 import androidx.media3.common.Format;
+import androidx.media3.common.MimeTypes;
+import androidx.media3.common.util.Assertions;
 import androidx.media3.common.util.TraceUtil;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
@@ -47,6 +50,29 @@ public final class ExperimentalFfmpegVideoRenderer extends DecoderVideoRenderer
 
   private static final String TAG = "ExperimentalFfmpegVideoRenderer";
 
+  private static final int DEFAULT_NUM_OF_INPUT_BUFFERS = 4;
+  private static final int DEFAULT_NUM_OF_OUTPUT_BUFFERS = 4;
+
+  /* Default size based on 720p resolution video compressed by a factor of two. */
+  private static final int DEFAULT_INPUT_BUFFER_SIZE =
+      Util.ceilDivide(1280, 64) * Util.ceilDivide(720, 64) * (64 * 64 * 3 / 2) / 2;
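+  // For the 720p default above: 20 * 12 * 6144 / 2 = 737,280 bytes.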
+
+
+  /**
+   * The number of input buffers.
+   */
+  private final int numInputBuffers;
+  /**
+   * The number of output buffers. The renderer may limit the minimum possible value due to
+   * requiring multiple output buffers to be dequeued at a time for it to make progress.
+   */
+  private final int numOutputBuffers;
+
+  private final int threads;
+
+  @Nullable
+  private ExperimentalFfmpegVideoDecoder decoder;
+
   /**
    * Creates a new instance.
    *
@@ -63,8 +89,39 @@ public ExperimentalFfmpegVideoRenderer(
       @Nullable Handler eventHandler,
       @Nullable VideoRendererEventListener eventListener,
       int maxDroppedFramesToNotify) {
+    this(
+        allowedJoiningTimeMs,
+        eventHandler,
+        eventListener,
+        maxDroppedFramesToNotify,
+        /* threads= */ getRuntime().availableProcessors(),
+        DEFAULT_NUM_OF_INPUT_BUFFERS,
+        DEFAULT_NUM_OF_OUTPUT_BUFFERS);
+  }
+
+  /**
+   * Creates a new instance.
+   *
+   * @param allowedJoiningTimeMs     The maximum duration in milliseconds for which this video renderer
+   *                                 can attempt to seamlessly join an ongoing playback.
+   * @param eventHandler             A handler to use when delivering events to {@code eventListener}. May be
+   *                                 null if delivery of events is not required.
+   * @param eventListener            A listener of events. May be null if delivery of events is not required.
+   * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
+   *                                 invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
+   */
+  public ExperimentalFfmpegVideoRenderer(
+      long allowedJoiningTimeMs,
+      @Nullable Handler eventHandler,
+      @Nullable VideoRendererEventListener eventListener,
+      int maxDroppedFramesToNotify,
+      int threads,
+      int numInputBuffers,
+      int numOutputBuffers) {
     super(allowedJoiningTimeMs, eventHandler, eventListener, maxDroppedFramesToNotify);
-    // TODO: Implement.
+    this.threads = threads;
+    this.numInputBuffers = numInputBuffers;
+    this.numOutputBuffers = numOutputBuffers;
   }
 
   @Override
@@ -74,51 +131,54 @@ public String getName() {
 
   @Override
   public final @RendererCapabilities.Capabilities int supportsFormat(Format format) {
-    // TODO: Remove this line and uncomment the implementation below.
-    return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
-    /*
     String mimeType = Assertions.checkNotNull(format.sampleMimeType);
     if (!FfmpegLibrary.isAvailable() || !MimeTypes.isVideo(mimeType)) {
-      return FORMAT_UNSUPPORTED_TYPE;
+      return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
     } else if (!FfmpegLibrary.supportsFormat(format.sampleMimeType)) {
-      return RendererCapabilities.create(FORMAT_UNSUPPORTED_SUBTYPE);
-    } else if (format.exoMediaCryptoType != null) {
-      return RendererCapabilities.create(FORMAT_UNSUPPORTED_DRM);
+      return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE);
+    } else if (format.cryptoType != C.CRYPTO_TYPE_NONE) {
+      return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_DRM);
     } else {
       return RendererCapabilities.create(
-          FORMAT_HANDLED,
+          C.FORMAT_HANDLED,
           ADAPTIVE_SEAMLESS,
           TUNNELING_NOT_SUPPORTED);
     }
-    */
   }
 
-  @SuppressWarnings("nullness:return")
   @Override
   protected Decoder<DecoderInputBuffer, VideoDecoderOutputBuffer, FfmpegDecoderException>
       createDecoder(Format format, @Nullable CryptoConfig cryptoConfig)
           throws FfmpegDecoderException {
     TraceUtil.beginSection("createFfmpegVideoDecoder");
-    // TODO: Implement, remove the SuppressWarnings annotation, and update the return type to use
-    // the concrete type of the decoder (probably FfmepgVideoDecoder).
+    int initialInputBufferSize =
+        format.maxInputSize != Format.NO_VALUE ? format.maxInputSize : DEFAULT_INPUT_BUFFER_SIZE;
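+    // Use at least four threads for software video decoding.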
+    int threads = Math.max(this.threads, 4);
+    ExperimentalFfmpegVideoDecoder decoder =
+        new ExperimentalFfmpegVideoDecoder(numInputBuffers, numOutputBuffers,
+            initialInputBufferSize, threads,
+            format);
+    this.decoder = decoder;
     TraceUtil.endSection();
-    return null;
+    return decoder;
   }
 
   @Override
   protected void renderOutputBufferToSurface(VideoDecoderOutputBuffer outputBuffer, Surface surface)
       throws FfmpegDecoderException {
-    // TODO: Implement.
+    if (decoder == null) {
+      throw new FfmpegDecoderException(
+          "Failed to render output buffer to surface: decoder is not initialized.");
+    }
+    decoder.renderToSurface(outputBuffer, surface);
+    outputBuffer.release();
   }
 
   @Override
   protected void setDecoderOutputMode(@C.VideoOutputMode int outputMode) {
-    // TODO: Uncomment the implementation below.
-    /*
     if (decoder != null) {
       decoder.setOutputMode(outputMode);
     }
-    */
   }
 
   @Override
diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
index e01b04f2ff3..62b557bfd4f 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
@@ -152,6 +152,7 @@ public static boolean supportsFormat(String mimeType) {
       case MimeTypes.VIDEO_H264:
         return "h264";
       case MimeTypes.VIDEO_H265:
+      case MimeTypes.VIDEO_DOLBY_VISION:
         return "hevc";
       default:
         return null;
diff --git a/libraries/decoder_ffmpeg/src/main/jni/BlockingQueue.h b/libraries/decoder_ffmpeg/src/main/jni/BlockingQueue.h
new file mode 100644
index 00000000000..266a8ea9d07
--- /dev/null
+++ b/libraries/decoder_ffmpeg/src/main/jni/BlockingQueue.h
@@ -0,0 +1,132 @@
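+// A small blocking queue used by ffmpeg_jni.cc to pass decoded AVFrame pointers from the
+// decoding thread to the YUV conversion thread. A capacity of -1 means unbounded; put()/take()
+// block, while offer()/poll() either fail immediately or wait for the given timeout.
+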
+#include <mutex>
+#include <condition_variable>
+#include <deque>
+#include <chrono>
+
+template<class T>
+class BlockingQueue {
+public:
+    using size_type = typename std::deque<T>::size_type;
+
+public:
+    BlockingQueue(const int cap = -1) : m_maxCapacity(cap) {}
+
+    ~BlockingQueue() {}
+
+    BlockingQueue(const BlockingQueue &) = delete;
+
+    BlockingQueue &operator=(const BlockingQueue &) = delete;
+
+public:
+    void put(const T t);
+
+    T take();
+
+    bool empty() const {
+        std::lock_guard<std::mutex> lock(m_mutex);
+        return m_queue.empty();
+    }
+
+    bool full() const {
+        if (-1 == m_maxCapacity)
+            return false;
+        std::lock_guard<std::mutex> lock(m_mutex);
+        return m_queue.size() >= m_maxCapacity;
+    }
+
+    size_type size() {
+        std::lock_guard<std::mutex> lock(m_mutex);
+        return m_queue.size();
+    }
+
+public:
+    bool offer(const T t);
+
+    bool poll(T &t);
+
+    bool offer(const T t, long mils);
+
+    bool poll(T &t, long mils);
+
+private:
+    std::deque<T> m_queue;
+    const int m_maxCapacity;
+    mutable std::mutex m_mutex;
+    std::condition_variable m_cond_empty;
+    std::condition_variable m_cond_full;
+};
+
+template<class T>
+void BlockingQueue<T>::put(const T t) {
+    std::unique_lock<std::mutex> lock(m_mutex);
+    if (m_maxCapacity != -1) {
+        m_cond_full.wait(lock, [this] { return m_queue.size() < m_maxCapacity; });
+    }
+    m_queue.push_back(t);
+    m_cond_empty.notify_all();
+}
+
+template<class T>
+T BlockingQueue<T>::take() {
+    std::unique_lock<std::mutex> lock(m_mutex);
+    // take() must wait until the queue is non-empty.
+    m_cond_empty.wait(lock, [&]() { return !m_queue.empty(); });
+    auto res = m_queue.front();
+    m_queue.pop_front();
+    m_cond_full.notify_all();
+    return res;
+}
+
+template<class T>
+bool BlockingQueue<T>::offer(const T t) {
+    std::unique_lock<std::mutex> lock(m_mutex);
+    if (m_maxCapacity != -1 && m_queue.size() >= m_maxCapacity) {
+        return false;
+    }
+    m_queue.push_back(t);
+    m_cond_empty.notify_all();
+    return true;
+}
+
+template<class T>
+bool BlockingQueue<T>::poll(T &t) {
+    std::unique_lock<std::mutex> lock(m_mutex);
+    if (m_queue.empty()) {
+        return false;
+    }
+    t = m_queue.front();
+    m_queue.pop_front();
+    m_cond_full.notify_all();
+    return true;
+}
+
+template<class T>
+bool BlockingQueue<T>::offer(const T t, long mils) {
+    std::unique_lock<std::mutex> lock(m_mutex);
+    std::chrono::milliseconds time(mils);
+    if (m_maxCapacity != -1) {
+        bool result = m_cond_full.wait_for(lock, time,
+                                       [&] { return m_queue.size() < m_maxCapacity; });
+        if (!result) {
+            return false;
+        }
+    }
+    m_queue.push_back(t);
+    m_cond_empty.notify_all();
+    return true;
+}
+
+template<class T>
+bool BlockingQueue<T>::poll(T &t, long mils) {
+    std::chrono::milliseconds time(mils);
+    std::unique_lock<std::mutex> lock(m_mutex);
+    bool result = m_cond_empty.wait_for(lock, time,
+                                        [&] { return !m_queue.empty(); });
+    if (!result) {
+        return false;
+    }
+    t = m_queue.front();
+    m_queue.pop_front();
+    m_cond_full.notify_all();
+    return true;
+}
\ No newline at end of file
diff --git a/libraries/decoder_ffmpeg/src/main/jni/CMakeLists.txt b/libraries/decoder_ffmpeg/src/main/jni/CMakeLists.txt
index fe74c78048d..a511355cc35 100644
--- a/libraries/decoder_ffmpeg/src/main/jni/CMakeLists.txt
+++ b/libraries/decoder_ffmpeg/src/main/jni/CMakeLists.txt
@@ -24,12 +24,12 @@ project(libffmpegJNI C CXX)
 set(ffmpeg_location "${CMAKE_CURRENT_SOURCE_DIR}/ffmpeg")
 set(ffmpeg_binaries "${ffmpeg_location}/android-libs/${ANDROID_ABI}")
 
-foreach(ffmpeg_lib avutil swresample avcodec)
-    set(ffmpeg_lib_filename lib${ffmpeg_lib}.a)
+foreach(ffmpeg_lib avutil swresample avcodec swscale)
+    set(ffmpeg_lib_filename lib${ffmpeg_lib}.so)
     set(ffmpeg_lib_file_path ${ffmpeg_binaries}/${ffmpeg_lib_filename})
     add_library(
             ${ffmpeg_lib}
-            STATIC
+            SHARED
             IMPORTED)
     set_target_properties(
             ${ffmpeg_lib} PROPERTIES
@@ -37,7 +37,27 @@ foreach(ffmpeg_lib avutil swresample avcodec)
             ${ffmpeg_lib_file_path})
 endforeach()
 
+
+set(yuv_location "${CMAKE_CURRENT_SOURCE_DIR}/libyuv")
+set(yuv_binaries "${yuv_location}/android-libs/${ANDROID_ABI}")
+
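+# The libyuv shared libraries are produced by build_yuv.sh into android-libs/<ABI> (see the README).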
+foreach(yuv_lib yuv)
+    set(yuv_lib_filename lib${yuv_lib}.so)
+    set(yuv_lib_file_path ${yuv_binaries}/${yuv_lib_filename})
+    add_library(
+            ${yuv_lib}
+            SHARED
+            IMPORTED)
+    set_target_properties(
+            ${yuv_lib} PROPERTIES
+            IMPORTED_LOCATION
+            ${yuv_lib_file_path})
+endforeach()
+
+
 include_directories(${ffmpeg_location})
+include_directories("${yuv_location}/include")
+
 find_library(android_log_lib log)
 
 add_library(ffmpegJNI
@@ -49,6 +69,8 @@ target_link_libraries(ffmpegJNI
                       PRIVATE swresample
                       PRIVATE avcodec
                       PRIVATE avutil
+                      PRIVATE swscale
+                      PRIVATE yuv
                       PRIVATE ${android_log_lib})
 
 # Additional flags needed for "arm64-v8a" from NDK 23.1.7779620 and above.
diff --git a/libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh b/libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh
index e0a9aa4f844..c38263e4406 100755
--- a/libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh
+++ b/libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh
@@ -37,7 +37,7 @@ COMMON_OPTIONS="
     --disable-everything
     --disable-avdevice
     --disable-avformat
-    --disable-swscale
+    --enable-swscale
     --disable-postproc
     --disable-avfilter
     --disable-symver
diff --git a/libraries/decoder_ffmpeg/src/main/jni/build_yuv.sh b/libraries/decoder_ffmpeg/src/main/jni/build_yuv.sh
new file mode 100755
index 00000000000..6c8db3941ab
--- /dev/null
+++ b/libraries/decoder_ffmpeg/src/main/jni/build_yuv.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+set -eu
+
+FFMPEG_MODULE_PATH="$1"
+echo "FFMPEG_MODULE_PATH is ${FFMPEG_MODULE_PATH}"
+NDK_PATH="$2"
+echo "NDK path is ${NDK_PATH}"
+ANDROID_ABI="$3"
+echo "ANDROID_ABI is ${ANDROID_ABI}"
+
+ABI_LIST="armeabi-v7a arm64-v8a x86 x86_64"
+echo "ABI List is ${ABI_LIST}"
+
+ANDROID_ABI_64BIT="$ANDROID_ABI"
+if [[ "$ANDROID_ABI_64BIT" -lt 21 ]]
+then
+    echo "Using ANDROID_ABI 21 for 64-bit architectures"
+    ANDROID_ABI_64BIT=21
+fi
+
+cd "${FFMPEG_MODULE_PATH}/jni/libyuv"
+
+for abi in ${ABI_LIST}; do
+  rm -rf "build-${abi}"
+  mkdir "build-${abi}"
+  cd "build-${abi}"
+
+  cmake .. \
+    -G "Unix Makefiles" \
+    -DCMAKE_TOOLCHAIN_FILE=$NDK_PATH/build/cmake/android.toolchain.cmake -DANDROID_ABI=${abi} -DCMAKE_ANDROID_ARCH_ABI=${abi} \
+    -DANDROID_NDK=${NDK_PATH} \
+    -DANDROID_PLATFORM=${ANDROID_ABI} \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DBUILD_SHARED_LIBS=ON \
+    -DCMAKE_ANDROID_STL_TYPE=c++_shared \
+    -DCMAKE_SYSTEM_NAME=Android \
+    -DCMAKE_THREAD_PREFER_PTHREAD=TRUE \
+    -DTHREADS_PREFER_PTHREAD_FLAG=TRUE \
+    -DBUILD_STATIC_LIBS=OFF
+
+  cmake --build .
+  cd ..
+done
+
+for abi in ${ABI_LIST}; do
+  mkdir -p "./android-libs/${abi}"
+  cp -r "build-${abi}/libyuv.so" "./android-libs/${abi}/libyuv.so"
+  echo "build-${abi}/libyuv.so was successfully copied to ./android-libs/${abi}/libyuv.so!"
+done
diff --git a/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc b/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
index a661b95c875..b79c57ed454 100644
--- a/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
+++ b/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
@@ -16,6 +16,12 @@
 #include <android/log.h>
 #include <jni.h>
 #include <stdlib.h>
+#include <thread>
+#include <android/native_window.h>
+#include <android/native_window_jni.h>
+#include "BlockingQueue.h"
+#include <libyuv.h>
+#include <libyuv/scale.h>
 
 extern "C" {
 #ifdef __cplusplus
@@ -30,11 +36,14 @@ extern "C" {
 #include <libavutil/error.h>
 #include <libavutil/opt.h>
 #include <libswresample/swresample.h>
+#include <libswscale/swscale.h>
 }
 
 #define LOG_TAG "ffmpeg_jni"
 #define LOGE(...) \
   ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
+#define LOGW(...) \
+  ((void)__android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__))
 #define LOGD(...) \
   ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
 
@@ -59,6 +68,16 @@ extern "C" {
       Java_androidx_media3_decoder_ffmpeg_FfmpegAudioDecoder_##NAME( \
           JNIEnv *env, jobject thiz, ##__VA_ARGS__)
 
+#define VIDEO_DECODER_FUNC(RETURN_TYPE, NAME, ...)                             \
+  extern "C" {                                                                 \
+  JNIEXPORT RETURN_TYPE                                                        \
+      Java_androidx_media3_decoder_ffmpeg_ExperimentalFfmpegVideoDecoder_##NAME( \
+          JNIEnv *env, jobject thiz, ##__VA_ARGS__);                           \
+  }                                                                            \
+  JNIEXPORT RETURN_TYPE                                                        \
+      Java_androidx_media3_decoder_ffmpeg_ExperimentalFfmpegVideoDecoder_##NAME( \
+          JNIEnv *env, jobject thiz, ##__VA_ARGS__)
+
 #define ERROR_STRING_BUFFER_LENGTH 256
 
 // Output format corresponding to AudioFormat.ENCODING_PCM_16BIT.
@@ -69,6 +88,12 @@ static const AVSampleFormat OUTPUT_FORMAT_PCM_FLOAT = AV_SAMPLE_FMT_FLT;
 static const int AUDIO_DECODER_ERROR_INVALID_DATA = -1;
 static const int AUDIO_DECODER_ERROR_OTHER = -2;
 
+static const int VIDEO_DECODER_ERROR_SURFACE = -4;
+static const int VIDEO_DECODER_SUCCESS = 0;
+static const int VIDEO_DECODER_ERROR_INVALID_DATA = -1;
+static const int VIDEO_DECODER_ERROR_OTHER = -2;
+static const int VIDEO_DECODER_ERROR_READ_FRAME = -3;
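+// The VIDEO_DECODER_* values must stay in sync with the constants in
+// ExperimentalFfmpegVideoDecoder.java (VIDEO_DECODER_ERROR_SURFACE is defined only natively).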
+
 static jmethodID growOutputBufferMethod;
 
 /**
@@ -428,3 +453,523 @@ void releaseContext(AVCodecContext *context) {
   }
   avcodec_free_context(&context);
 }
+
+// video
+
+// Android YUV format. See:
+// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12.
+const int kImageFormatYV12 = 0x32315659;
+
+struct JniContext {
+    ~JniContext() {
+        if (native_window) {
+            ANativeWindow_release(native_window);
+        }
+    }
+
+    bool MaybeAcquireNativeWindow(JNIEnv *env, jobject new_surface) {
+        if (surface == new_surface) {
+            return true;
+        }
+        if (native_window) {
+            ANativeWindow_release(native_window);
+        }
+        native_window_width = 0;
+        native_window_height = 0;
+        native_window = ANativeWindow_fromSurface(env, new_surface);
+        if (native_window == nullptr) {
+            LOGE("kJniStatusANativeWindowError");
+            surface = nullptr;
+            return false;
+        }
+        surface = new_surface;
+        return true;
+    }
+
+    jfieldID data_field;
+    jfieldID yuvPlanes_field;
+    jfieldID yuvStrides_field;
+    jfieldID width_field;
+    jfieldID height_field;
+    jfieldID pts_field;
+    jmethodID init_for_private_frame_method;
+    jmethodID init_for_yuv_frame_method;
+    jmethodID init_method;
+
+    AVCodecContext *codecContext;
+    SwsContext *swsContext;
+
+    ANativeWindow *native_window = nullptr;
+    jobject surface = nullptr;
+    // Rotation degree
+    int rotate_degree = 0;
+    int native_window_width = 0;
+    int native_window_height = 0;
+
+    // Thread that receives decoded frames
+    std::thread *poll_image_thread;
+    bool released = false;
+    BlockingQueue<AVFrame *> *image_output_buffer = nullptr;
+    BlockingQueue<AVFrame *> *image_input_buffer = nullptr;
+};
+
+constexpr int AlignTo16(int value) { return (value + 15) & (~15); }
+
+/**
+ * Maps an FFmpeg AVColorSpace to the colorspace constants expected by
+ * VideoDecoderOutputBuffer (0 = unknown, 1 = BT.601, 2 = BT.709, 3 = BT.2020).
+ */
+constexpr int cvt_colorspace(AVColorSpace colorSpace) {
+    switch (colorSpace) {
+        case AVCOL_SPC_BT470BG:
+        case AVCOL_SPC_SMPTE170M:
+        case AVCOL_SPC_SMPTE240M:
+            return 1;
+        case AVCOL_SPC_BT709:
+            return 2;
+        case AVCOL_SPC_BT2020_NCL:
+        case AVCOL_SPC_BT2020_CL:
+            return 3;
+        default:
+            return 0;
+    }
+}
+
+/**
+ * Converts a frame in another pixel format (e.g. yuv420p10le) to the destination format and
+ * scales it to the destination size.
+ * @return the converted AVFrame, or nullptr on failure
+ */
+AVFrame *cvt_frame(JniContext *jniContext,
+                   AVFrame *src,
+                   AVPixelFormat dst_format,
+                   int dst_width,
+                   int dst_height) {
+    auto src_format = AVPixelFormat(src->format);
+    auto swsContext = sws_getCachedContext(jniContext->swsContext,
+                                           src->width, src->height, src_format,
+                                           dst_width, dst_height, dst_format,
+                                           SWS_FAST_BILINEAR, NULL, NULL, NULL
+    );
+    if (!swsContext) {
+        LOGE("Failed to allocate swsContext.");
+        return nullptr;
+    }
+
+    jniContext->swsContext = swsContext;
+    auto dst = av_frame_alloc();
+    av_frame_copy_props(dst, src); // copy meta data
+    dst->width = dst_width;
+    dst->height = dst_height;
+    dst->format = dst_format;
+    auto alloc_result = av_frame_get_buffer(dst, 0);    // allocate buffer
+    if (alloc_result != 0) {
+        logError("av_frame_get_buffer", alloc_result);
+        av_frame_free(&dst);
+        return nullptr;
+    }
+    auto scale_result = sws_scale(swsContext,
+                                  src->data, src->linesize, 0, src->height,
+                                  dst->data, dst->linesize);
+    if (scale_result <= 0) {
+        logError("sws_scale", scale_result);
+        av_frame_free(&dst);
+        return nullptr;
+    }
+    return dst;
+}
+
+/**
+ * Convert degree to libyuv::RotationMode
+ * @return libyuv::RotationMode
+ */
+libyuv::RotationMode cvt_rotate(int degree) {
+    libyuv::RotationMode rotate = libyuv::kRotate0;
+    if (degree == 90) {
+        rotate = libyuv::kRotate90;
+    } else if (degree == 180) {
+        rotate = libyuv::kRotate180;
+    } else if (degree == 270) {
+        rotate = libyuv::kRotate270;
+    }
+    return rotate;
+}
+
+/**
+ * Conversion-thread loop: takes decoded frames from image_input_buffer, converts them to
+ * standard YUV420P, and offers the results to image_output_buffer until the context is released.
+ */
+void cvt_image_runnable(JniContext *jniContext) {
+    while (!jniContext->released) {
+        auto output_buffer = jniContext->image_output_buffer;
+        auto input_buffer = jniContext->image_input_buffer;
+
+        AVFrame *input = nullptr;
+        auto poll_rst = input_buffer->poll(input, 100L);
+        if (!poll_rst || input == nullptr) {
+            continue;
+        }
+
+        // success
+        // pixformat map to yuv420p
+        auto output = cvt_frame(jniContext, input, AVPixelFormat::AV_PIX_FMT_YUV420P, input->width, input->height);
+        if (!output) {
+            LOGE("Failed to cvt_frame");
+            av_frame_free(&input);
+            jniContext->released = true;
+            break;  // Fall through to the cleanup below rather than leaking queued frames.
+        }
+        av_frame_free(&input);
+
+        {
+            auto offer_rst = output_buffer->offer(output, 100L);
+            if (!offer_rst) {
+                av_frame_free(&output);
+            }
+        }
+    }
+
+    // free
+    while (!jniContext->image_input_buffer->empty()) {
+        auto buffer = jniContext->image_input_buffer->take();
+        av_frame_free(&buffer);
+    }
+    while (!jniContext->image_output_buffer->empty()) {
+        auto buffer = jniContext->image_output_buffer->take();
+        av_frame_free(&buffer);
+    }
+
+    auto swsContext = jniContext->swsContext;
+    if (swsContext) {
+        sws_freeContext(swsContext);
+        jniContext->swsContext = NULL;
+    }
+}
+
+
+JniContext *createVideoContext(JNIEnv *env,
+                               const AVCodec *codec,
+                               jbyteArray extraData,
+                               jint threads,
+                               jint degree) {
+    JniContext *jniContext = new(std::nothrow)JniContext();
+
+    AVCodecContext *codecContext = avcodec_alloc_context3(codec);
+    if (!codecContext) {
+        LOGE("Failed to allocate context.");
+        return NULL;
+    }
+
+    // rotate
+    jniContext->rotate_degree = degree;
+
+    if (extraData) {
+        jsize size = env->GetArrayLength(extraData);
+        codecContext->extradata_size = size;
+        codecContext->extradata = (uint8_t *) av_malloc(size + AV_INPUT_BUFFER_PADDING_SIZE);
+        if (!codecContext->extradata) {
+            LOGE("Failed to allocate extradata.");
+            releaseContext(codecContext);
+            return NULL;
+        }
+        env->GetByteArrayRegion(extraData, 0, size, (jbyte *) codecContext->extradata);
+    }
+
+    // opt decode speed.
+    codecContext->flags |= AV_CODEC_FLAG_LOW_DELAY;
+    codecContext->skip_loop_filter = AVDISCARD_ALL;
+    codecContext->skip_frame = AVDISCARD_DEFAULT;
+    codecContext->thread_count = threads;
+    codecContext->err_recognition = AV_EF_IGNORE_ERR;
+    int result = avcodec_open2(codecContext, codec, NULL);
+    if (result < 0) {
+        logError("avcodec_open2", result);
+        releaseContext(codecContext);
+        return NULL;
+    }
+
+    jniContext->codecContext = codecContext;
+
+    jniContext->image_output_buffer = new BlockingQueue<AVFrame *>(5);
+    jniContext->image_input_buffer = new BlockingQueue<AVFrame *>(5);
+    jniContext->poll_image_thread = new std::thread(cvt_image_runnable, jniContext);
+    pthread_setname_np(jniContext->poll_image_thread->native_handle(), "m3:ffmpeg:cvt");
+
+    // Populate JNI References.
+    const jclass outputBufferClass = env->FindClass("androidx/media3/decoder/VideoDecoderOutputBuffer");
+    jniContext->data_field = env->GetFieldID(outputBufferClass, "data", "Ljava/nio/ByteBuffer;");
+    jniContext->width_field = env->GetFieldID(outputBufferClass, "width", "I");
+    jniContext->height_field = env->GetFieldID(outputBufferClass, "height", "I");
+    jniContext->pts_field = env->GetFieldID(outputBufferClass, "timeUs", "J");
+
+
+    jniContext->yuvPlanes_field =
+            env->GetFieldID(outputBufferClass, "yuvPlanes", "[Ljava/nio/ByteBuffer;");
+    jniContext->yuvStrides_field = env->GetFieldID(outputBufferClass, "yuvStrides", "[I");
+    jniContext->init_for_private_frame_method =
+            env->GetMethodID(outputBufferClass, "initForPrivateFrame", "(II)V");
+    jniContext->init_for_yuv_frame_method =
+            env->GetMethodID(outputBufferClass, "initForYuvFrame", "(IIIII)Z");
+    jniContext->init_method =
+            env->GetMethodID(outputBufferClass, "init", "(JILjava/nio/ByteBuffer;)V");
+
+    return jniContext;
+}
+
+
+VIDEO_DECODER_FUNC(jlong, ffmpegInitialize, jstring codecName, jbyteArray extraData, jint threads, jint degree) {
+    auto *codec = getCodecByName(env, codecName);
+    if (!codec) {
+        LOGE("Codec not found.");
+        return 0L;
+    }
+
+    return (jlong) createVideoContext(env, codec, extraData, threads, degree);
+}
+
+
+VIDEO_DECODER_FUNC(jlong, ffmpegReset, jlong jContext) {
+    JniContext *const jniContext = reinterpret_cast<JniContext *>(jContext);
+    AVCodecContext *context = jniContext->codecContext;
+    if (!context) {
+        LOGE("Tried to reset without a context.");
+        return 0L;
+    }
+
+    avcodec_flush_buffers(context);
+    return (jlong) jniContext;
+}
+
+VIDEO_DECODER_FUNC(void, ffmpegRelease, jlong jContext) {
+    JniContext *const jniContext = reinterpret_cast<JniContext *>(jContext);
+    AVCodecContext *context = jniContext->codecContext;
+
+    if (context) {
+        avcodec_free_context(&context);
+        jniContext->codecContext = NULL;
+    }
+
+    jniContext->released = true;
+    jniContext->poll_image_thread->detach();
+}
+
+
+VIDEO_DECODER_FUNC(jint, ffmpegSendPacket, jlong jContext, jobject encodedData,
+                   jint length, jlong inputTimeUs) {
+    JniContext *const jniContext = reinterpret_cast<JniContext *>(jContext);
+    AVCodecContext *avContext = jniContext->codecContext;
+
+    uint8_t *inputBuffer = (uint8_t *) env->GetDirectBufferAddress(encodedData);
+    auto packet = av_packet_alloc();
+    packet->data = inputBuffer;
+    packet->size = length;
+    packet->pts = inputTimeUs;
+
+    int result = 0;
+    // Queue input data.
+    result = avcodec_send_packet(avContext, packet);
+    av_packet_free(&packet);
+    if (result) {
+        logError("avcodec_send_packet", result);
+        if (result == AVERROR_INVALIDDATA) {
+            // need more data
+            return VIDEO_DECODER_ERROR_INVALID_DATA;
+        } else if (result == AVERROR(EAGAIN)) {
+            // need read frame
+            return VIDEO_DECODER_ERROR_READ_FRAME;
+        } else {
+            return VIDEO_DECODER_ERROR_OTHER;
+        }
+    }
+    return result;
+}
+
+VIDEO_DECODER_FUNC(jint, ffmpegReceiveFrame, jlong jContext, jint outputMode, jobject jOutputBuffer,
+                   jboolean decodeOnly) {
+    JniContext *const jniContext = reinterpret_cast<JniContext *>(jContext);
+    AVCodecContext *avContext = jniContext->codecContext;
+    int result = 0;
+    AVFrame *frame = av_frame_alloc();
+    if (!frame) {
+        LOGE("Failed to allocate output frame.");
+        return VIDEO_DECODER_ERROR_OTHER;
+    }
+
+    result = avcodec_receive_frame(avContext, frame);
+
+    if (decodeOnly) {
+        av_frame_free(&frame);
+        return VIDEO_DECODER_ERROR_INVALID_DATA;
+    }
+
+    if (result == AVERROR(EAGAIN)) {
+        // This is not an error. The input data was decode-only or no displayable
+        // frames are available.
+        av_frame_free(&frame);
+        return VIDEO_DECODER_ERROR_INVALID_DATA;
+    }
+    if (result != 0) {
+        // EAGAIN was handled above, so any other non-zero result is an error.
+        av_frame_free(&frame);
+        logError("avcodec_receive_frame", result);
+        return VIDEO_DECODER_ERROR_OTHER;
+    }
+
+    if (jniContext->released || !jniContext->poll_image_thread->joinable()) {
+        LOGE("Poll image thread already shut down.");
+        av_frame_free(&frame);
+        return VIDEO_DECODER_ERROR_OTHER;
+    }
+
+    // frame success! offer to pool
+    if (result == 0) {
+        auto offer_rst = jniContext->image_input_buffer->offer(frame);
+        if (!offer_rst) {
+            av_frame_free(&frame);
+            LOGW("Offer to image_input_buffer failed.");
+        }
+    }
+
+    // Take a converted frame from the output queue, if one is ready. Conversion runs on a
+    // separate thread, so the frame returned here may lag behind the packet just sent.
+
+    int width = env->GetIntField(jOutputBuffer, jniContext->width_field);
+    int height = env->GetIntField(jOutputBuffer, jniContext->height_field);
+
+    AVFrame *cvt_frame = nullptr;
+    jniContext->image_output_buffer->poll(cvt_frame);
+    if (cvt_frame == nullptr) {
+        LOGW("Poll from image_output_buffer failed.");
+        return VIDEO_DECODER_ERROR_INVALID_DATA;
+    }
+
+    auto dst_width = cvt_frame->width;
+    auto dst_height = cvt_frame->height;
+    int output_width = dst_width;
+    int output_height = dst_height;
+
+    // adjust rotate degree
+    if (jniContext->rotate_degree == 90 || jniContext->rotate_degree == 270) {
+        output_width = dst_height;
+        output_height = dst_width;
+    }
+    // adjust ColorSpace
+    int color_space = cvt_colorspace(cvt_frame->colorspace);
+
+    int stride_y = output_width;
+    int stride_uv = (output_width + 1) / 2;
+
+    jboolean init_result = JNI_TRUE;
+    if (width != output_width || height != output_height) {
+        // (Re)initialize the Java-side YUV buffers whenever the frame dimensions change.
+        init_result = env->CallBooleanMethod(jOutputBuffer, jniContext->init_for_yuv_frame_method,
+                                             output_width, output_height, stride_y, stride_uv, color_space);
+        LOGD("init_for_yuv_frame_method! wh [%d,%d], buffer wh [%d,%d]", output_width, output_height, width, height);
+    } else {
+        env->SetLongField(jOutputBuffer, jniContext->pts_field, cvt_frame->pts);
+    }
+
+    if (env->ExceptionCheck()) {
+        av_frame_free(&cvt_frame);
+        // Exception is thrown in Java when returning from the native call.
+        return VIDEO_DECODER_ERROR_OTHER;
+    }
+    if (!init_result) {
+        av_frame_free(&cvt_frame);
+        return VIDEO_DECODER_ERROR_OTHER;
+    }
+
+    auto data_object = env->GetObjectField(jOutputBuffer, jniContext->data_field);
+    auto *data = reinterpret_cast<uint8_t *>(env->GetDirectBufferAddress(data_object));
+
+    const int32_t height_uv = (output_height + 1) / 2;
+    const uint64_t length_y = stride_y * output_height;
+    const uint64_t length_uv = stride_uv * height_uv;
+
+    // rotate YUV data & copy to OutputBuffer
+    libyuv::RotationMode rotate = cvt_rotate(jniContext->rotate_degree);
+    libyuv::I420Rotate(
+            cvt_frame->data[0], cvt_frame->linesize[0],
+            cvt_frame->data[1], cvt_frame->linesize[1],
+            cvt_frame->data[2], cvt_frame->linesize[2],
+            data, stride_y,
+            data + length_y, stride_uv,
+            data + length_y + length_uv, stride_uv,
+            cvt_frame->width, cvt_frame->height, rotate
+    );
+    av_frame_free(&cvt_frame);
+    return result;
+}
+
+VIDEO_DECODER_FUNC(jint, ffmpegRenderFrame, jlong jContext, jobject jSurface,
+                   jobject jOutputBuffer, jint displayedWidth, jint displayedHeight) {
+    JniContext *const jniContext = reinterpret_cast<JniContext *>(jContext);
+    if (!jniContext->MaybeAcquireNativeWindow(env, jSurface)) {
+        return VIDEO_DECODER_ERROR_OTHER;
+    }
+
+    if (jniContext->native_window_width != displayedWidth ||
+        jniContext->native_window_height != displayedHeight) {
+        int rst = ANativeWindow_setBuffersGeometry(
+                jniContext->native_window,
+                displayedWidth,
+                displayedHeight,
+                kImageFormatYV12);
+        if (rst) {
+            LOGE("kJniStatusANativeWindowError ANativeWindow_setBuffersGeometry rst [%d]", rst);
+            return VIDEO_DECODER_ERROR_OTHER;
+        }
+        jniContext->native_window_width = displayedWidth;
+        jniContext->native_window_height = displayedHeight;
+    }
+
+    ANativeWindow_Buffer native_window_buffer;
+    int result = ANativeWindow_lock(jniContext->native_window, &native_window_buffer, nullptr);
+    if (result == -19) {
+        // Surface: dequeueBuffer failed (No such device)
+        jniContext->surface = nullptr;
+        return VIDEO_DECODER_ERROR_SURFACE;
+    } else if (result || native_window_buffer.bits == nullptr) {
+        LOGE("kJniStatusANativeWindowError ANativeWindow_lock rst [%d]", result);
+        return VIDEO_DECODER_ERROR_OTHER;
+    }
+
+    auto data_object = env->GetObjectField(jOutputBuffer, jniContext->data_field);
+    auto *data = reinterpret_cast<uint8_t *>(env->GetDirectBufferAddress(data_object));
+
+    auto frame_width = env->GetIntField(jOutputBuffer, jniContext->width_field);
+    auto frame_height = env->GetIntField(jOutputBuffer, jniContext->height_field);
+    int src_stride_y = frame_width;
+    int src_stride_uv = (frame_width + 1) / 2;
+    const int32_t height_uv = (frame_height + 1) / 2;
+    const uint64_t src_length_y = src_stride_y * frame_height;
+    const uint64_t src_length_uv = src_stride_uv * height_uv;
+
+    const int window_y_plane_size = native_window_buffer.stride * native_window_buffer.height;
+    const int32_t window_uv_plane_height = (native_window_buffer.height + 1) / 2;
+    const int window_uv_plane_stride = AlignTo16(native_window_buffer.stride / 2);
+    const int window_v_plane_height = std::min(window_uv_plane_height, native_window_buffer.height);
+    const int window_v_plane_size = window_v_plane_height * window_uv_plane_stride;
+    const auto window_bits = reinterpret_cast<uint8_t *>(native_window_buffer.bits);
+
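+    // YV12 layout: the full-resolution Y plane is followed by the V plane and then the U
+    // plane, with the chroma stride equal to half the Y stride aligned up to 16 bytes.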
+    libyuv::I420Copy(
+            data, src_stride_y,
+            data + src_length_y, src_stride_uv,
+            data + src_length_y + src_length_uv, src_stride_uv,
+            window_bits, native_window_buffer.stride,
+            window_bits + window_y_plane_size + window_v_plane_size, window_uv_plane_stride,
+            window_bits + window_y_plane_size, window_uv_plane_stride,
+            native_window_buffer.width, native_window_buffer.height
+    );
+    int rst = ANativeWindow_unlockAndPost(jniContext->native_window);
+    if (rst) {
+        LOGE("kJniStatusANativeWindowError ANativeWindow_unlockAndPost rst [%d]", rst);
+        return VIDEO_DECODER_ERROR_OTHER;
+    }
+
+    return VIDEO_DECODER_SUCCESS;
+}
+
+

From bb281936ca6b945d8f646fef49b62a63eb92f6a3 Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Sun, 4 Aug 2024 23:03:48 +0800
Subject: [PATCH 2/9] Fix the FFmpeg compile options and update some comments.

---
 .../decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java     | 2 +-
 libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh       | 6 +++---
 libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc         | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
index b2224a265ee..49fde73d8e3 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
@@ -39,11 +39,11 @@
 import androidx.media3.exoplayer.video.DecoderVideoRenderer;
 import androidx.media3.exoplayer.video.VideoRendererEventListener;
 
-// TODO: Merge actual implementation in https://github.com/google/ExoPlayer/pull/7132.
 /**
 * <b>NOTE: This class is under development and is not yet functional.</b>
  *
  * <p>Decodes and renders video using FFmpeg.
+ * <p>Merges the implementation from https://github.com/google/ExoPlayer/pull/7132.
  */
 @UnstableApi
 public final class ExperimentalFfmpegVideoRenderer extends DecoderVideoRenderer {
diff --git a/libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh b/libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh
index c38263e4406..91d79369ae7 100755
--- a/libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh
+++ b/libraries/decoder_ffmpeg/src/main/jni/build_ffmpeg.sh
@@ -30,13 +30,13 @@ JOBS="$(nproc 2> /dev/null || sysctl -n hw.ncpu 2> /dev/null || echo 4)"
 echo "Using $JOBS jobs for make"
 COMMON_OPTIONS="
     --target-os=android
-    --enable-static
-    --disable-shared
+    --disable-static
+    --enable-shared
     --disable-doc
     --disable-programs
     --disable-everything
     --disable-avdevice
-    --disable-avformat
+    --enable-avformat
     --enable-swscale
     --disable-postproc
     --disable-avfilter
diff --git a/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc b/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
index b79c57ed454..6ed15a0666e 100644
--- a/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
+++ b/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
@@ -501,12 +501,12 @@ struct JniContext {
 
     ANativeWindow *native_window = nullptr;
     jobject surface = nullptr;
-    // Rotation degree
+    // Rotation degree taken from the input Format
     int rotate_degree = 0;
     int native_window_width = 0;
     int native_window_height = 0;
 
-    // Thread that receives decoded frames
+    // Thread that converts decoded frames to YUV420P
     std::thread *poll_image_thread;
     bool released = false;
     BlockingQueue<AVFrame *> *image_output_buffer = nullptr;

From 9a4a2820b9bf6063bc6ebc064045f1f191af2d27 Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Tue, 6 Aug 2024 15:07:19 +0800
Subject: [PATCH 3/9] Remove `ExperimentalFfmpegVideoRenderer` DecoderReuse
 Support & Fix supportsFormat NPE!

---
 .../ffmpeg/ExperimentalFfmpegVideoRenderer.java | 17 ++++-------------
 .../media3/decoder/ffmpeg/FfmpegLibrary.java    |  5 ++++-
 2 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
index 49fde73d8e3..6b087d66ba5 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoRenderer.java
@@ -15,18 +15,15 @@
  */
 package androidx.media3.decoder.ffmpeg;
 
-import static androidx.media3.exoplayer.DecoderReuseEvaluation.DISCARD_REASON_MIME_TYPE_CHANGED;
-import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_NO;
-import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_YES_WITHOUT_RECONFIGURATION;
 import static java.lang.Runtime.getRuntime;
 
 import android.os.Handler;
 import android.view.Surface;
+
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
 import androidx.media3.common.Format;
 import androidx.media3.common.MimeTypes;
-import androidx.media3.common.util.Assertions;
 import androidx.media3.common.util.TraceUtil;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
@@ -131,10 +128,10 @@ public String getName() {
 
   @Override
   public final @RendererCapabilities.Capabilities int supportsFormat(Format format) {
-    String mimeType = Assertions.checkNotNull(format.sampleMimeType);
+    String mimeType = format.sampleMimeType;
     if (!FfmpegLibrary.isAvailable() || !MimeTypes.isVideo(mimeType)) {
       return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
-    } else if (!FfmpegLibrary.supportsFormat(format.sampleMimeType)) {
+    } else if (!FfmpegLibrary.supportsFormat(mimeType)) {
       return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE);
     } else if (format.cryptoType != C.CRYPTO_TYPE_NONE) {
       return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_DRM);
@@ -184,13 +181,7 @@ protected void setDecoderOutputMode(@C.VideoOutputMode int outputMode) {
   @Override
   protected DecoderReuseEvaluation canReuseDecoder(
       String decoderName, Format oldFormat, Format newFormat) {
-    boolean sameMimeType = Util.areEqual(oldFormat.sampleMimeType, newFormat.sampleMimeType);
     // TODO: Ability to reuse the decoder may be MIME type dependent.
-    return new DecoderReuseEvaluation(
-        decoderName,
-        oldFormat,
-        newFormat,
-        sameMimeType ? REUSE_RESULT_YES_WITHOUT_RECONFIGURATION : REUSE_RESULT_NO,
-        sameMimeType ? 0 : DISCARD_REASON_MIME_TYPE_CHANGED);
+    return super.canReuseDecoder(decoderName, oldFormat, newFormat);
   }
 }
diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
index 62b557bfd4f..05816e5a065 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
@@ -95,10 +95,13 @@ public static int getInputBufferPaddingSize() {
    *
    * @param mimeType The MIME type to check.
    */
-  public static boolean supportsFormat(String mimeType) {
+  public static boolean supportsFormat(@Nullable String mimeType) {
     if (!isAvailable()) {
       return false;
     }
+    if (mimeType == null) {
+      return false;
+    }
     @Nullable String codecName = getCodecName(mimeType);
     if (codecName == null) {
       return false;

From 24cc94504f5aa59bf0e8310d01c9d10055e5afe8 Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Thu, 15 Aug 2024 19:35:43 +0800
Subject: [PATCH 4/9] Optimize FFmpeg avcodec decode speed with frame multithreading

---
 libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc b/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
index 6ed15a0666e..3afdc03df2e 100644
--- a/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
+++ b/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
@@ -674,10 +674,10 @@ JniContext *createVideoContext(JNIEnv *env,
     }
 
     // opt decode speed.
-    codecContext->flags |= AV_CODEC_FLAG_LOW_DELAY;
     codecContext->skip_loop_filter = AVDISCARD_ALL;
     codecContext->skip_frame = AVDISCARD_DEFAULT;
     codecContext->thread_count = threads;
+    codecContext->thread_type = FF_THREAD_FRAME;
     codecContext->err_recognition = AV_EF_IGNORE_ERR;
     int result = avcodec_open2(codecContext, codec, NULL);
     if (result < 0) {

From 53bc7dfe288635b63208dd805f19e3943684bb92 Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Thu, 15 Aug 2024 19:38:16 +0800
Subject: [PATCH 5/9] Add support for VP8, VP9, WebM in FFmpeg.
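
For context, the new cases extend the MIME-type-to-FFmpeg-decoder-name mapping that
`supportsFormat` probes via `ffmpegHasDecoder`. The sketch below is a hypothetical,
standalone illustration (class and method names are invented); note that `webm` names
a container rather than a codec, so FFmpeg may not expose a decoder under that name:

```java
/** Hypothetical sketch of the MIME-type-to-decoder-name mapping added in this patch. */
public final class VideoCodecNames {

  /** Maps a video MIME type to the FFmpeg decoder name that will be probed by name. */
  static String codecNameForMimeType(String mimeType) {
    switch (mimeType) {
      case "video/x-vnd.on2.vp8":
        return "vp8"; // FFmpeg's built-in software VP8 decoder.
      case "video/x-vnd.on2.vp9":
        return "vp9"; // FFmpeg's built-in software VP9 decoder.
      case "video/webm":
        // Caution: WebM is a container format, so a decoder named "webm" is unlikely
        // to exist; this entry may always come back as unsupported at runtime.
        return "webm";
      default:
        return null;
    }
  }

  public static void main(String[] args) {
    System.out.println(codecNameForMimeType("video/x-vnd.on2.vp9")); // vp9
    System.out.println(codecNameForMimeType("video/avc"));           // null (not mapped here)
  }
}
```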

---
 .../androidx/media3/decoder/ffmpeg/FfmpegLibrary.java    | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
index 05816e5a065..05b4beebfc0 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
@@ -22,6 +22,7 @@
 import androidx.media3.common.util.LibraryLoader;
 import androidx.media3.common.util.Log;
 import androidx.media3.common.util.UnstableApi;
+
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 
 /** Configures and queries the underlying native library. */
@@ -157,6 +158,12 @@ public static boolean supportsFormat(@Nullable String mimeType) {
       case MimeTypes.VIDEO_H265:
       case MimeTypes.VIDEO_DOLBY_VISION:
         return "hevc";
+      case MimeTypes.VIDEO_VP8:
+        return "vp8";
+      case MimeTypes.VIDEO_VP9:
+        return "vp9";
+      case MimeTypes.VIDEO_WEBM:
+        return "webm";
       default:
         return null;
     }
@@ -167,4 +174,4 @@ public static boolean supportsFormat(@Nullable String mimeType) {
   private static native int ffmpegGetInputBufferPaddingSize();
 
   private static native boolean ffmpegHasDecoder(String codecName);
-}
+}
\ No newline at end of file

From 00ef1b5dddb2e5b1141de6ca976b978108328319 Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Mon, 19 Aug 2024 15:15:50 +0800
Subject: [PATCH 6/9] Fix the decoder blocking when flushing the
 FfmpegVideoDecoder.

---
 .../ExperimentalFfmpegVideoDecoder.java       |   3 +-
 .../src/main/jni/BlockingQueue.h              | 132 ------------------
 .../decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc | 128 +++--------------
 3 files changed, 25 insertions(+), 238 deletions(-)
 delete mode 100644 libraries/decoder_ffmpeg/src/main/jni/BlockingQueue.h

diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoDecoder.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoDecoder.java
index 3707b56d08d..9cc7f561a0d 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoDecoder.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/ExperimentalFfmpegVideoDecoder.java
@@ -18,6 +18,7 @@
 import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE;
 
 import android.view.Surface;
+
 import androidx.annotation.Nullable;
 import androidx.annotation.VisibleForTesting;
 import androidx.media3.common.C;
@@ -28,6 +29,7 @@
 import androidx.media3.decoder.DecoderInputBuffer;
 import androidx.media3.decoder.SimpleDecoder;
 import androidx.media3.decoder.VideoDecoderOutputBuffer;
+
 import java.nio.ByteBuffer;
 import java.util.List;
 
@@ -210,7 +212,6 @@ public void renderToSurface(VideoDecoderOutputBuffer outputBuffer, Surface surfa
     }
     int rst = ffmpegRenderFrame(nativeContext, surface, outputBuffer, outputBuffer.width,
         outputBuffer.height);
-//    Log.d(TAG, "renderToSurface: rst = " + rst + ",surface = " + surface + ",buffer = " + outputBuffer.timeUs);
     if (rst == VIDEO_DECODER_ERROR_OTHER) {
       throw new FfmpegDecoderException(
           "Buffer render error: ");
diff --git a/libraries/decoder_ffmpeg/src/main/jni/BlockingQueue.h b/libraries/decoder_ffmpeg/src/main/jni/BlockingQueue.h
deleted file mode 100644
index 266a8ea9d07..00000000000
--- a/libraries/decoder_ffmpeg/src/main/jni/BlockingQueue.h
+++ /dev/null
@@ -1,132 +0,0 @@
-#include <mutex>
-#include <condition_variable>
-#include <deque>
-#include <chrono>
-
-template<class T>
-class BlockingQueue {
-public:
-    using size_type = typename std::deque<T>::size_type;
-
-public:
-    BlockingQueue(const int cap = -1) : m_maxCapacity(cap) {}
-
-    ~BlockingQueue() {}
-
-    BlockingQueue(const BlockingQueue &) = delete;
-
-    BlockingQueue &operator=(const BlockingQueue &) = delete;
-
-public:
-    void put(const T t);
-
-    T take();
-
-    bool empty() const {
-        std::lock_guard<std::mutex> lock(m_mutex);
-        return m_queue.empty();
-    }
-
-    bool full() const {
-        if (-1 == m_maxCapacity)
-            return false;
-        std::lock_guard<std::mutex> lock(m_mutex);
-        return m_queue.size() >= m_maxCapacity;
-    }
-
-    size_type size() {
-        std::lock_guard<std::mutex> lock(m_mutex);
-        return m_queue.size();
-    }
-
-public:
-    bool offer(const T t);
-
-    bool poll(T &t);
-
-    bool offer(const T t, long mils);
-
-    bool poll(T &t, long mils);
-
-private:
-    std::deque<T> m_queue;
-    const int m_maxCapacity;
-    mutable std::mutex m_mutex;
-    std::condition_variable m_cond_empty;
-    std::condition_variable m_cond_full;
-};
-
-template<class T>
-void BlockingQueue<T>::put(const T t) {
-    std::unique_lock<std::mutex> lock(m_mutex);
-    if (m_maxCapacity != -1) {
-        m_cond_full.wait(lock, [this] { return m_queue.size() < m_maxCapacity; });
-    }
-    m_queue.push_back(t);
-    m_cond_empty.notify_all();
-}
-
-template<class T>
-T BlockingQueue<T>::take() {
-    std::unique_lock<std::mutex> lock(m_mutex);
-    // take() must wait until the queue is non-empty.
-    m_cond_empty.wait(lock, [&]() { return !m_queue.empty(); });
-    auto res = m_queue.front();
-    m_queue.pop_front();
-    m_cond_full.notify_all();
-    return res;
-}
-
-template<class T>
-bool BlockingQueue<T>::offer(const T t) {
-    std::unique_lock<std::mutex> lock(m_mutex);
-    if (m_maxCapacity != -1 && m_queue.size() >= m_maxCapacity) {
-        return false;
-    }
-    m_queue.push_back(t);
-    m_cond_empty.notify_all();
-    return true;
-}
-
-template<class T>
-bool BlockingQueue<T>::poll(T &t) {
-    std::unique_lock<std::mutex> lock(m_mutex);
-    if (m_queue.empty()) {
-        return false;
-    }
-    t = m_queue.front();
-    m_queue.pop_front();
-    m_cond_full.notify_all();
-    return true;
-}
-
-template<class T>
-bool BlockingQueue<T>::offer(const T t, long mils) {
-    std::unique_lock<std::mutex> lock(m_mutex);
-    std::chrono::milliseconds time(mils);
-    if (m_maxCapacity != -1) {
-        bool result = m_cond_full.wait_for(lock, time,
-                                       [&] { return m_queue.size() < m_maxCapacity; });
-        if (!result) {
-            return false;
-        }
-    }
-    m_queue.push_back(t);
-    m_cond_empty.notify_all();
-    return true;
-}
-
-template<class T>
-bool BlockingQueue<T>::poll(T &t, long mils) {
-    std::chrono::milliseconds time(mils);
-    std::unique_lock<std::mutex> lock(m_mutex);
-    bool result = m_cond_empty.wait_for(lock, time,
-                                        [&] { return !m_queue.empty(); });
-    if (!result) {
-        return false;
-    }
-    t = m_queue.front();
-    m_queue.pop_front();
-    m_cond_full.notify_all();
-    return true;
-}
\ No newline at end of file
diff --git a/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc b/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
index 3afdc03df2e..4055113542c 100644
--- a/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
+++ b/libraries/decoder_ffmpeg/src/main/jni/ffmpeg_jni.cc
@@ -19,7 +19,6 @@
 #include <thread>
 #include <android/native_window.h>
 #include <android/native_window_jni.h>
-#include "BlockingQueue.h"
 #include <libyuv.h>
 #include <libyuv/scale.h>
 
@@ -505,12 +504,6 @@ struct JniContext {
     int rotate_degree = 0;
     int native_window_width = 0;
     int native_window_height = 0;
-
-    // Thread that converts decoded frames to YUV420P.
-    std::thread *poll_image_thread;
-    bool released = false;
-    BlockingQueue<AVFrame *> *image_output_buffer = nullptr;
-    BlockingQueue<AVFrame *> *image_input_buffer = nullptr;
 };
 
 constexpr int AlignTo16(int value) { return (value + 15) & (~15); }
@@ -539,7 +532,7 @@ constexpr int cvt_colorspace(AVColorSpace colorSpace) {
  * and scale
  * @return AVFrame
  */
-AVFrame *cvt_frame(JniContext *jniContext,
+AVFrame *cvt_format(JniContext *jniContext,
                    AVFrame *src,
                    AVPixelFormat dst_format,
                    int dst_width,
@@ -594,57 +587,6 @@ libyuv::RotationMode cvt_rotate(int degree) {
     return rotate;
 }
 
-/**
- * Single Thread to Convert Standard YUV420
- */
-void cvt_image_runnable(JniContext *jniContext) {
-    while (!jniContext->released) {
-        auto output_buffer = jniContext->image_output_buffer;
-        auto input_buffer = jniContext->image_input_buffer;
-
-        AVFrame *input = nullptr;
-        auto poll_rst = input_buffer->poll(input, 100L);
-        if (!poll_rst || input == nullptr) {
-            continue;
-        }
-
-        // success
-        // pixformat map to yuv420p
-        auto output = cvt_frame(jniContext, input, AVPixelFormat::AV_PIX_FMT_YUV420P, input->width, input->height);
-        if (!output) {
-            LOGE("Failed to cvt_frame");
-            av_frame_free(&input);
-            jniContext->released = true;
-            return;
-        }
-        av_frame_free(&input);
-
-        {
-            auto offer_rst = output_buffer->offer(output, 100L);
-            if (!offer_rst) {
-                av_frame_free(&output);
-            }
-        }
-    }
-
-    // free
-    while (!jniContext->image_input_buffer->empty()) {
-        auto buffer = jniContext->image_input_buffer->take();
-        av_frame_free(&buffer);
-    }
-    while (!jniContext->image_output_buffer->empty()) {
-        auto buffer = jniContext->image_output_buffer->take();
-        av_frame_free(&buffer);
-    }
-
-    auto swsContext = jniContext->swsContext;
-    if (swsContext) {
-        sws_freeContext(swsContext);
-        jniContext->swsContext = NULL;
-    }
-}
-
-
 JniContext *createVideoContext(JNIEnv *env,
                                const AVCodec *codec,
                                jbyteArray extraData,
@@ -688,11 +630,6 @@ JniContext *createVideoContext(JNIEnv *env,
 
     jniContext->codecContext = codecContext;
 
-    jniContext->image_output_buffer = new BlockingQueue<AVFrame *>(5);
-    jniContext->image_input_buffer = new BlockingQueue<AVFrame *>(5);
-    jniContext->poll_image_thread = new std::thread(cvt_image_runnable, jniContext);
-    pthread_setname_np(jniContext->poll_image_thread->native_handle(), "m3:ffmpeg:cvt");
-
     // Populate JNI References.
     const jclass outputBufferClass = env->FindClass("androidx/media3/decoder/VideoDecoderOutputBuffer");
     jniContext->data_field = env->GetFieldID(outputBufferClass, "data", "Ljava/nio/ByteBuffer;");
@@ -741,14 +678,17 @@ VIDEO_DECODER_FUNC(jlong, ffmpegReset, jlong jContext) {
 VIDEO_DECODER_FUNC(void, ffmpegRelease, jlong jContext) {
     JniContext *const jniContext = reinterpret_cast<JniContext *>(jContext);
     AVCodecContext *context = jniContext->codecContext;
+    SwsContext *swsContext = jniContext->swsContext;
 
     if (context) {
         avcodec_free_context(&context);
         jniContext->codecContext = NULL;
     }
 
-    jniContext->released = true;
-    jniContext->poll_image_thread->detach();
+    if (swsContext) {
+        sws_freeContext(swsContext);
+        jniContext->swsContext = NULL;
+    }
 }
 
 
@@ -787,64 +727,42 @@ VIDEO_DECODER_FUNC(jint, ffmpegReceiveFrame, jlong jContext, jint outputMode, jo
     JniContext *const jniContext = reinterpret_cast<JniContext *>(jContext);
     AVCodecContext *avContext = jniContext->codecContext;
     int result = 0;
-    AVFrame *frame = av_frame_alloc();
-    if (!frame) {
+    AVFrame *raw_frame = av_frame_alloc();
+    if (!raw_frame) {
         LOGE("Failed to allocate output frame.");
         return VIDEO_DECODER_ERROR_OTHER;
     }
 
-    result = avcodec_receive_frame(avContext, frame);
-
-    if (decodeOnly) {
-        av_frame_free(&frame);
-        return VIDEO_DECODER_ERROR_INVALID_DATA;
-    }
+    result = avcodec_receive_frame(avContext, raw_frame);
 
-    if (result == AVERROR(EAGAIN)) {
+    if (decodeOnly || result == AVERROR(EAGAIN)) {
         // This is not an error. The input data was decode-only or no displayable
         // frames are available.
-        av_frame_free(&frame);
+        av_frame_free(&raw_frame);
         return VIDEO_DECODER_ERROR_INVALID_DATA;
     }
-    if (result != 0) {
-        av_frame_free(&frame);
-        if (result == AVERROR(EAGAIN)) {
-            // This is not an error. The input data was decode-only or no displayable
-            // frames are available.
-        } else {
-            logError("avcodec_receive_frame", result);
-            return VIDEO_DECODER_ERROR_OTHER;
-        }
-    }
 
-    if (jniContext->released || !jniContext->poll_image_thread->joinable()) {
-        LOGE("Poll image thread already shut down.");
-        av_frame_free(&frame);
+    // Any other non-zero result is a genuine decode error.
+    if (result != 0) {
+        av_frame_free(&raw_frame);
+        logError("avcodec_receive_frame", result);
         return VIDEO_DECODER_ERROR_OTHER;
     }
 
-    // frame success! offer to pool
-    if (result == 0) {
-        auto offer_rst = jniContext->image_input_buffer->offer(frame);
-        if (!offer_rst) {
-            av_frame_free(&frame);
-            LOGW("Offer to image_input_buffer failed.");
-        }
+    // Use swscale to convert the frame to YUV420P.
+    AVFrame *cvt_frame = cvt_format(jniContext, raw_frame, AV_PIX_FMT_YUV420P, raw_frame->width, raw_frame->height);
+    if (cvt_frame == nullptr) {
+        av_frame_free(&raw_frame);
+        LOGW("Conversion to YUV420P failed.");
+        return VIDEO_DECODER_ERROR_OTHER;
     }
 
-    // --- take cvt frame & return
+    // Conversion succeeded; free the raw frame.
+    av_frame_free(&raw_frame);
 
-    // success
     int width = env->GetIntField(jOutputBuffer, jniContext->width_field);
     int height = env->GetIntField(jOutputBuffer, jniContext->height_field);
 
-    AVFrame *cvt_frame = nullptr;
-    jniContext->image_output_buffer->poll(cvt_frame);
-    if (cvt_frame == nullptr) {
-        LOGW("Poll from image_output_buffer failed.");
-        return VIDEO_DECODER_ERROR_INVALID_DATA;
-    }
-
     auto dst_width = cvt_frame->width;
     auto dst_height = cvt_frame->height;
     int output_width = dst_width;

From 57346bbf36a7456e99008298cf55ce16011401db Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Wed, 21 Aug 2024 22:57:01 +0800
Subject: [PATCH 7/9] Add MPEG and MPEG-2 video support for FfmpegVideoRenderer.

---
 .../java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java    | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
index 05b4beebfc0..b7440f868b1 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
@@ -164,6 +164,10 @@ public static boolean supportsFormat(@Nullable String mimeType) {
         return "vp9";
       case MimeTypes.VIDEO_WEBM:
         return "webm";
+      case MimeTypes.VIDEO_MPEG:
+        return "mpegvideo";
+      case MimeTypes.VIDEO_MPEG2:
+        return "mpeg2video";
       default:
         return null;
     }

From f01a8aa4446bd3bcf41b167ef9e5bad7d9a7f300 Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Tue, 24 Sep 2024 18:27:35 +0800
Subject: [PATCH 8/9] Add support for Apple ProRes video
 (ap4x/ap4h/apch/apcn/apcs/apco) through FfmpegDecoder.
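
The sample-entry types added here are plain fourccs packed into big-endian ints, and
every ProRes profile decodes through FFmpeg's single `prores` decoder. The helper
below is a hypothetical reference (not part of the patch) that reproduces the
constant values:

```java
/** Hypothetical helper reproducing the ProRes sample-entry fourcc constants. */
public final class ProResFourccs {

  /** Packs a four-character code into the big-endian int layout used by the Atom constants. */
  static int fourcc(String code) {
    return (code.charAt(0) << 24) | (code.charAt(1) << 16) | (code.charAt(2) << 8) | code.charAt(3);
  }

  public static void main(String[] args) {
    // ProRes 4444 XQ, 4444, 422 HQ, 422, 422 LT and 422 Proxy respectively;
    // all of them are handled by the single FFmpeg decoder named "prores".
    for (String code : new String[] {"ap4x", "ap4h", "apch", "apcn", "apcs", "apco"}) {
      System.out.printf("TYPE_%s = 0x%08x%n", code, fourcc(code));
    }
    // e.g. TYPE_apch = 0x61706368, matching the constant added to Atom.java.
  }
}
```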

---
 .../java/androidx/media3/common/MimeTypes.java   |  1 +
 .../media3/decoder/ffmpeg/FfmpegLibrary.java     |  2 ++
 .../java/androidx/media3/extractor/mp4/Atom.java | 16 ++++++++++++++++
 .../media3/extractor/mp4/AtomParsers.java        | 11 ++++++++++-
 4 files changed, 29 insertions(+), 1 deletion(-)

diff --git a/libraries/common/src/main/java/androidx/media3/common/MimeTypes.java b/libraries/common/src/main/java/androidx/media3/common/MimeTypes.java
index fc69c0bd316..f90967b8ad2 100644
--- a/libraries/common/src/main/java/androidx/media3/common/MimeTypes.java
+++ b/libraries/common/src/main/java/androidx/media3/common/MimeTypes.java
@@ -45,6 +45,7 @@ public final class MimeTypes {
   public static final String VIDEO_H263 = BASE_TYPE_VIDEO + "/3gpp";
   public static final String VIDEO_H264 = BASE_TYPE_VIDEO + "/avc";
   public static final String VIDEO_H265 = BASE_TYPE_VIDEO + "/hevc";
+  public static final String VIDEO_ProRes = BASE_TYPE_VIDEO + "/prores";
   @UnstableApi public static final String VIDEO_VP8 = BASE_TYPE_VIDEO + "/x-vnd.on2.vp8";
   @UnstableApi public static final String VIDEO_VP9 = BASE_TYPE_VIDEO + "/x-vnd.on2.vp9";
   public static final String VIDEO_AV1 = BASE_TYPE_VIDEO + "/av01";
diff --git a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
index b7440f868b1..dea86c84e26 100644
--- a/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
+++ b/libraries/decoder_ffmpeg/src/main/java/androidx/media3/decoder/ffmpeg/FfmpegLibrary.java
@@ -168,6 +168,8 @@ public static boolean supportsFormat(@Nullable String mimeType) {
         return "mpegvideo";
       case MimeTypes.VIDEO_MPEG2:
         return "mpeg2video";
+      case MimeTypes.VIDEO_ProRes:
+        return "prores";
       default:
         return null;
     }
diff --git a/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/Atom.java b/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/Atom.java
index 6f4537ddbe3..d0b63bf141a 100644
--- a/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/Atom.java
+++ b/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/Atom.java
@@ -446,6 +446,22 @@
   @SuppressWarnings("ConstantCaseForConstants")
   public static final int TYPE_iacb = 0x69616362;
 
+  @SuppressWarnings("ConstantCaseForConstants")
+  public static final int TYPE_ap4x = 0x61703468;
+
+  @SuppressWarnings("ConstantCaseForConstants")
+  public static final int TYPE_apch = 0x61706368;
+
+  @SuppressWarnings("ConstantCaseForConstants")
+  public static final int TYPE_apcn = 0x6170636e;
+
+  @SuppressWarnings("ConstantCaseForConstants")
+  public static final int TYPE_apcs = 0x61706373;
+
+  @SuppressWarnings("ConstantCaseForConstants")
+  public static final int TYPE_apco = 0x6170636f;
+
+
   public final int type;
 
   public Atom(int type) {
diff --git a/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/AtomParsers.java b/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/AtomParsers.java
index 7a4c01fbd3c..c33effe896d 100644
--- a/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/AtomParsers.java
+++ b/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/AtomParsers.java
@@ -982,7 +982,12 @@ private static StsdData parseStsd(
           || childAtomType == Atom.TYPE_dvav
           || childAtomType == Atom.TYPE_dva1
           || childAtomType == Atom.TYPE_dvhe
-          || childAtomType == Atom.TYPE_dvh1) {
+          || childAtomType == Atom.TYPE_dvh1
+          || childAtomType == Atom.TYPE_ap4x
+          || childAtomType == Atom.TYPE_apch
+          || childAtomType == Atom.TYPE_apcn
+          || childAtomType == Atom.TYPE_apcs
+          || childAtomType == Atom.TYPE_apco) {
         parseVideoSampleEntry(
             stsd,
             childAtomType,
@@ -1148,6 +1153,10 @@ private static void parseVideoSampleEntry(
       mimeType = MimeTypes.VIDEO_MPEG;
     } else if (atomType == Atom.TYPE_H263) {
       mimeType = MimeTypes.VIDEO_H263;
+    } else if (atomType == Atom.TYPE_ap4x || atomType == Atom.TYPE_apch
+            || atomType == Atom.TYPE_apcn || atomType == Atom.TYPE_apcs
+            || atomType == Atom.TYPE_apco) {
+      mimeType = MimeTypes.VIDEO_ProRes;
     }
 
     @Nullable List<byte[]> initializationData = null;

From a2a6d378d32ae82040fec3864beaa6ddc1daeac7 Mon Sep 17 00:00:00 2001
From: tokii <mottchenz@gmail.com>
Date: Tue, 24 Sep 2024 20:02:35 +0800
Subject: [PATCH 9/9] Fix ProRes Support for ap4h & ap4x.
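
The underlying bug: the earlier `TYPE_ap4x` constant actually held the fourcc for
`ap4h` (ProRes 4444), so ProRes 4444 XQ entries were mislabelled and plain 4444 had
no constant at all. A small hypothetical check (not part of the patch) decodes both
values:

```java
/** Hypothetical check decoding the two fourcc values involved in this fix. */
public final class FourccDecode {

  /** Unpacks a big-endian fourcc int back into its four ASCII characters. */
  static String decode(int fourcc) {
    return "" + (char) ((fourcc >>> 24) & 0xFF) + (char) ((fourcc >>> 16) & 0xFF)
        + (char) ((fourcc >>> 8) & 0xFF) + (char) (fourcc & 0xFF);
  }

  public static void main(String[] args) {
    System.out.println(decode(0x61703468)); // "ap4h" (ProRes 4444): the value TYPE_ap4x held before.
    System.out.println(decode(0x61703478)); // "ap4x" (ProRes 4444 XQ): the corrected value.
  }
}
```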

---
 .../src/main/java/androidx/media3/container/Mp4Box.java    | 5 ++++-
 .../main/java/androidx/media3/extractor/mp4/BoxParser.java | 7 ++++---
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/libraries/container/src/main/java/androidx/media3/container/Mp4Box.java b/libraries/container/src/main/java/androidx/media3/container/Mp4Box.java
index c8007c1ef77..fed3f4902c9 100644
--- a/libraries/container/src/main/java/androidx/media3/container/Mp4Box.java
+++ b/libraries/container/src/main/java/androidx/media3/container/Mp4Box.java
@@ -453,7 +453,10 @@ public abstract class Mp4Box {
   public static final int TYPE_edvd = 0x65647664;
 
   @SuppressWarnings("ConstantCaseForConstants")
-  public static final int TYPE_ap4x = 0x61703468;
+  public static final int TYPE_ap4x = 0x61703478;
+
+  @SuppressWarnings("ConstantCaseForConstants")
+  public static final int TYPE_ap4h = 0x61703468;
 
   @SuppressWarnings("ConstantCaseForConstants")
   public static final int TYPE_apch = 0x61706368;
diff --git a/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/BoxParser.java b/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/BoxParser.java
index c383e90ed7c..10e075af7c8 100644
--- a/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/BoxParser.java
+++ b/libraries/extractor/src/main/java/androidx/media3/extractor/mp4/BoxParser.java
@@ -1003,6 +1003,7 @@ private static StsdData parseStsd(
           || childAtomType == Mp4Box.TYPE_dvhe
           || childAtomType == Mp4Box.TYPE_dvh1
           || childAtomType == Mp4Box.TYPE_ap4x
+          || childAtomType == Mp4Box.TYPE_ap4h
           || childAtomType == Mp4Box.TYPE_apch
           || childAtomType == Mp4Box.TYPE_apcn
           || childAtomType == Mp4Box.TYPE_apcs
@@ -1172,9 +1173,9 @@ private static void parseVideoSampleEntry(
       mimeType = MimeTypes.VIDEO_MPEG;
     } else if (atomType == Mp4Box.TYPE_H263) {
       mimeType = MimeTypes.VIDEO_H263;
-    } else if (atomType == Mp4Box.TYPE_ap4x || atomType == Mp4Box.TYPE_apch
-            || atomType == Mp4Box.TYPE_apcn || atomType == Mp4Box.TYPE_apcs
-            || atomType == Mp4Box.TYPE_apco) {
+    } else if (atomType == Mp4Box.TYPE_ap4x || atomType == Mp4Box.TYPE_ap4h
+            || atomType == Mp4Box.TYPE_apch || atomType == Mp4Box.TYPE_apcn
+            || atomType == Mp4Box.TYPE_apcs || atomType == Mp4Box.TYPE_apco) {
       mimeType = MimeTypes.VIDEO_ProRes;
     }