diff --git a/common/Android.bp b/common/Android.bp
index e9c5fe37b221775d2c14b139b3808b638edf7aed..5e8d647b9001085e393f73927b5516ae6a1e8ae6 100644
--- a/common/Android.bp
+++ b/common/Android.bp
@@ -20,11 +20,10 @@ cc_library {
         "EncodeHelpers.cpp",
         "FormatConverter.cpp",
         "Fourcc.cpp",
+        "H264NalParser.cpp",
+        "HEVCNalParser.cpp",
         "NalParser.cpp",
-        "V4L2ComponentCommon.cpp",
         "VideoTypes.cpp",
-        "V4L2Device.cpp",
-        "V4L2DevicePoller.cpp",
         "VideoPixelFormat.cpp",
     ],
 
diff --git a/common/Common.cpp b/common/Common.cpp
index 79243ec5e39800238472afe5a8f8b7bef14ef99d..24bb97554a687d2f669dc39d500cc4ad24841bf4 100644
--- a/common/Common.cpp
+++ b/common/Common.cpp
@@ -32,4 +32,22 @@ std::string toString(const ui::Size& size) {
     return std::to_string(size.width) + "x" + std::to_string(size.height);
 }
 
+// Check whether the specified profile is a valid profile for the specified codec.
+bool isValidProfileForCodec(VideoCodec codec, C2Config::profile_t profile) {
+    switch (codec) {
+    case VideoCodec::H264:
+        return ((profile >= C2Config::PROFILE_AVC_BASELINE) &&
+                (profile <= C2Config::PROFILE_AVC_ENHANCED_MULTIVIEW_DEPTH_HIGH));
+    case VideoCodec::VP8:
+        return ((profile >= C2Config::PROFILE_VP8_0) && (profile <= C2Config::PROFILE_VP8_3));
+    case VideoCodec::VP9:
+        return ((profile >= C2Config::PROFILE_VP9_0) && (profile <= C2Config::PROFILE_VP9_3));
+    case VideoCodec::HEVC:
+        return ((profile >= C2Config::PROFILE_HEVC_MAIN) &&
+                (profile <= C2Config::PROFILE_HEVC_3D_MAIN));
+    default:
+        return false;
+    }
+}
+
 }  // namespace android
diff --git a/common/EncodeHelpers.cpp b/common/EncodeHelpers.cpp
index c07281f00b2401b504f29f59db1621e0be7a8c4d..c1a9425f30d0403cd86a51dce29274198bc42e73 100644
--- a/common/EncodeHelpers.cpp
+++ b/common/EncodeHelpers.cpp
@@ -14,7 +14,7 @@
 #include <ui/GraphicBuffer.h>
 #include <utils/Log.h>
 
-#include <v4l2_codec2/common/NalParser.h>
+#include <v4l2_codec2/common/H264NalParser.h>
 
 namespace android {
 
@@ -114,15 +114,15 @@ bool extractSPSPPS(const uint8_t* data, size_t length, std::vector<uint8_t>* sps
                    std::vector<uint8_t>* pps) {
     bool foundSPS = false;
     bool foundPPS = false;
-    NalParser parser(data, length);
+    H264NalParser parser(data, length);
     while (!(foundSPS && foundPPS) && parser.locateNextNal()) {
         switch (parser.type()) {
-        case NalParser::kSPSType:
+        case H264NalParser::kSPSType:
             sps->resize(parser.length());
             memcpy(sps->data(), parser.data(), parser.length());
             foundSPS = true;
             break;
-        case NalParser::kPPSType:
+        case H264NalParser::kPPSType:
             pps->resize(parser.length());
             memcpy(pps->data(), parser.data(), parser.length());
             foundPPS = true;
@@ -155,24 +155,24 @@ size_t prependSPSPPSToIDR(const uint8_t* src, size_t srcSize, uint8_t* dst, size
                           std::vector<uint8_t>* sps, std::vector<uint8_t>* pps) {
     bool foundStreamParams = false;
     size_t remainingDstSize = dstSize;
-    NalParser parser(src, srcSize);
+    H264NalParser parser(src, srcSize);
     while (parser.locateNextNal()) {
         switch (parser.type()) {
-        case NalParser::kSPSType:
+        case H264NalParser::kSPSType:
             // SPS found, copy to cache.
             ALOGV("Found SPS (length %zu)", parser.length());
             sps->resize(parser.length());
             memcpy(sps->data(), parser.data(), parser.length());
             foundStreamParams = true;
             break;
-        case NalParser::kPPSType:
+        case H264NalParser::kPPSType:
             // PPS found, copy to cache.
             ALOGV("Found PPS (length %zu)", parser.length());
             pps->resize(parser.length());
             memcpy(pps->data(), parser.data(), parser.length());
             foundStreamParams = true;
             break;
-        case NalParser::kIDRType:
+        case H264NalParser::kIDRType:
             ALOGV("Found IDR (length %zu)", parser.length());
             if (foundStreamParams) {
                 ALOGV("Not injecting SPS and PPS before IDR, already present");
diff --git a/common/FormatConverter.cpp b/common/FormatConverter.cpp
index d694bd1aab0e50a8c9dfba37dd776876c05515a6..cb1a0495c7ffd2e01f702690a8f7fdf7de0d7ce0 100644
--- a/common/FormatConverter.cpp
+++ b/common/FormatConverter.cpp
@@ -60,7 +60,7 @@ ImplDefinedToRGBXMap::~ImplDefinedToRGBXMap() {
 }
 
 // static
-std::unique_ptr<ImplDefinedToRGBXMap> ImplDefinedToRGBXMap::Create(
+std::unique_ptr<ImplDefinedToRGBXMap> ImplDefinedToRGBXMap::create(
         const C2ConstGraphicBlock& block) {
     uint32_t width, height, format, stride, igbpSlot, generation;
     uint64_t usage, igbpId;
@@ -92,7 +92,7 @@ std::unique_ptr<ImplDefinedToRGBXMap> ImplDefinedToRGBXMap::Create(
 }
 
 // static
-std::unique_ptr<FormatConverter> FormatConverter::Create(VideoPixelFormat outFormat,
+std::unique_ptr<FormatConverter> FormatConverter::create(VideoPixelFormat outFormat,
                                                          const ui::Size& visibleSize,
                                                          uint32_t inputCount,
                                                          const ui::Size& codedSize) {
@@ -115,78 +115,117 @@ c2_status_t FormatConverter::initialize(VideoPixelFormat outFormat, const ui::Si
           videoPixelFormatToString(outFormat).c_str(), visibleSize.width, visibleSize.height,
           inputCount, codedSize.width, codedSize.height);
 
-    std::shared_ptr<C2BlockPool> pool;
-    c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, nullptr, &pool);
+    mOutFormat = outFormat;
+    mVisibleSize = visibleSize;
+    mCodedSize = codedSize;
+
+    mTempPlaneU =
+            std::unique_ptr<uint8_t[]>(new uint8_t[mVisibleSize.width * mVisibleSize.height / 4]);
+    mTempPlaneV =
+            std::unique_ptr<uint8_t[]>(new uint8_t[mVisibleSize.width * mVisibleSize.height / 4]);
+
+    // Allocate graphic blocks for format conversion.
+    uint32_t requested_buffer_count = std::max(1u, inputCount);
+    c2_status_t status = allocateBuffers(requested_buffer_count);
     if (status != C2_OK) {
-        ALOGE("Failed to get basic graphic block pool (err=%d)", status);
+        ALOGE("Failed to allocate buffers (error: %d)", status);
         return status;
     }
 
+    return C2_OK;
+}
+
+c2_status_t FormatConverter::allocateBuffers(uint32_t count) {
+    ALOGV("Allocating %u buffers (format: %s, visible size: %dx%d, coded size: %dx%d)", count,
+          videoPixelFormatToString(mOutFormat).c_str(), mVisibleSize.width, mVisibleSize.height,
+          mCodedSize.width, mCodedSize.height);
+
     HalPixelFormat halFormat;
-    if (outFormat == VideoPixelFormat::I420) {
-        // Android HAL format doesn't have I420, we use YV12 instead and swap U and V data while
-        // conversion to perform I420.
+    if (mOutFormat == VideoPixelFormat::I420) {
+        // Android HAL format doesn't have I420, we use YV12 instead and swap U/V while converting.
         halFormat = HalPixelFormat::YV12;
     } else {
-        halFormat = HalPixelFormat::YCBCR_420_888;  // will allocate NV12 by minigbm.
+        halFormat = HalPixelFormat::YCBCR_420_888;  // Will allocate NV12 in minigbm.
+    }
+
+    std::shared_ptr<C2BlockPool> pool;
+    c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, nullptr, &pool);
+    if (status != C2_OK) {
+        ALOGE("Failed to get basic graphic block pool (error: %d)", status);
+        return C2_NO_MEMORY;
     }
 
-    uint32_t bufferCount = std::max(inputCount, kMinInputBufferCount);
-    for (uint32_t i = 0; i < bufferCount; i++) {
+    for (uint32_t i = 0; i < count; i++) {
         std::shared_ptr<C2GraphicBlock> block;
-        status = pool->fetchGraphicBlock(codedSize.width, codedSize.height,
+        status = pool->fetchGraphicBlock(mCodedSize.width, mCodedSize.height,
                                          static_cast<uint32_t>(halFormat),
                                          {(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
                                           static_cast<uint64_t>(BufferUsage::VIDEO_ENCODER)},
                                          &block);
         if (status != C2_OK) {
-            ALOGE("Failed to fetch graphic block (err=%d)", status);
-            return status;
+            ALOGE("Failed to fetch graphic block (error: %d)", status);
+            return C2_NO_MEMORY;
         }
         mGraphicBlocks.emplace_back(new BlockEntry(std::move(block)));
         mAvailableQueue.push(mGraphicBlocks.back().get());
     }
 
-    mOutFormat = outFormat;
-    mVisibleSize = visibleSize;
-
-    mTempPlaneU =
-            std::unique_ptr<uint8_t[]>(new uint8_t[mVisibleSize.width * mVisibleSize.height / 4]);
-    mTempPlaneV =
-            std::unique_ptr<uint8_t[]>(new uint8_t[mVisibleSize.width * mVisibleSize.height / 4]);
-
     return C2_OK;
 }
 
-C2ConstGraphicBlock FormatConverter::convertBlock(uint64_t frameIndex,
-                                                  const C2ConstGraphicBlock& inputBlock,
-                                                  c2_status_t* status) {
-    if (!isReady()) {
-        ALOGV("There is no available block for conversion");
-        *status = C2_NO_MEMORY;
-        return inputBlock;  // This is actually redundant and should not be used.
-    }
-
-    BlockEntry* entry = mAvailableQueue.front();
-    std::shared_ptr<C2GraphicBlock> outputBlock = entry->mBlock;
-
+c2_status_t FormatConverter::convertBlock(uint64_t frameIndex,
+                                          const C2ConstGraphicBlock& inputBlock,
+                                          C2ConstGraphicBlock* convertedBlock) {
     const C2GraphicView& inputView = inputBlock.map().get();
     C2PlanarLayout inputLayout = inputView.layout();
 
-    // The above layout() cannot fill layout information and memset 0 instead if the input format is
-    // IMPLEMENTATION_DEFINED and its backed format is RGB. We fill the layout by using
-    // ImplDefinedToRGBXMap in the case.
+    // Determine the input buffer pixel format.
+    VideoPixelFormat inputFormat = VideoPixelFormat::UNKNOWN;
     std::unique_ptr<ImplDefinedToRGBXMap> idMap;
-    if (static_cast<uint32_t>(inputLayout.type) == 0u) {
-        idMap = ImplDefinedToRGBXMap::Create(inputBlock);
-        if (idMap == nullptr) {
+    if (inputLayout.type == C2PlanarLayout::TYPE_YUV) {
+        if (inputLayout.rootPlanes == 3) {
+            inputFormat = VideoPixelFormat::YV12;
+        } else if (inputLayout.rootPlanes == 2) {
+            const uint8_t* const* data = inputView.data();
+            inputFormat = (data[C2PlanarLayout::PLANE_V] > data[C2PlanarLayout::PLANE_U])
+                                  ? VideoPixelFormat::NV12
+                                  : VideoPixelFormat::NV21;
+        }
+    } else if (inputLayout.type == C2PlanarLayout::TYPE_RGB) {
+        inputFormat = VideoPixelFormat::ABGR;
+    } else if (static_cast<uint32_t>(inputLayout.type) == 0u) {
+        // The above layout() cannot fill layout information and sets it to 0 instead if the input
+        // format is IMPLEMENTATION_DEFINED and its backed format is RGB. We fill the layout by
+        // using ImplDefinedToRGBXMap in this case.
+        idMap = ImplDefinedToRGBXMap::create(inputBlock);
+        if (!idMap) {
             ALOGE("Unable to parse RGBX_8888 from IMPLEMENTATION_DEFINED");
-            *status = C2_CORRUPTED;
-            return inputBlock;  // This is actually redundant and should not be used.
+            return C2_CORRUPTED;
         }
+        // There is only RGBA_8888 specified in C2AllocationGralloc::map(), no BGRA_8888. Maybe
+        // BGRA_8888 is not used now?
+        inputFormat = VideoPixelFormat::ABGR;
         inputLayout.type = C2PlanarLayout::TYPE_RGB;
+    } else {
+        ALOGE("Failed to determine input pixel format: %u", inputLayout.type);
+        return C2_CORRUPTED;
+    }
+
+    if (inputFormat == mOutFormat) {
+        ALOGV("Zero-Copy is applied");
+        mGraphicBlocks.emplace_back(new BlockEntry(frameIndex));
+        *convertedBlock = inputBlock;
+        return C2_OK;
+    }
+
+    if (!isReady()) {
+        ALOGV("There is no available block for conversion");
+        return C2_NO_MEMORY;
     }
 
+    BlockEntry* entry = mAvailableQueue.front();
+    std::shared_ptr<C2GraphicBlock> outputBlock = entry->mBlock;
+
     C2GraphicView outputView = outputBlock->map().get();
     C2PlanarLayout outputLayout = outputView.layout();
     uint8_t* dstY = outputView.data()[C2PlanarLayout::PLANE_Y];
@@ -198,8 +237,6 @@ C2ConstGraphicBlock FormatConverter::convertBlock(uint64_t frameIndex,
     const int dstStrideV = outputLayout.planes[C2PlanarLayout::PLANE_U].rowInc;   // only for I420
     const int dstStrideUV = outputLayout.planes[C2PlanarLayout::PLANE_U].rowInc;  // only for NV12
 
-    VideoPixelFormat inputFormat = VideoPixelFormat::UNKNOWN;
-    *status = C2_OK;
     if (inputLayout.type == C2PlanarLayout::TYPE_YUV) {
         const uint8_t* srcY = inputView.data()[C2PlanarLayout::PLANE_Y];
         const uint8_t* srcU = inputView.data()[C2PlanarLayout::PLANE_U];
@@ -207,17 +244,6 @@ C2ConstGraphicBlock FormatConverter::convertBlock(uint64_t frameIndex,
         const int srcStrideY = inputLayout.planes[C2PlanarLayout::PLANE_Y].rowInc;
         const int srcStrideU = inputLayout.planes[C2PlanarLayout::PLANE_U].rowInc;
         const int srcStrideV = inputLayout.planes[C2PlanarLayout::PLANE_V].rowInc;
-        if (inputLayout.rootPlanes == 3) {
-            inputFormat = VideoPixelFormat::YV12;
-        } else if (inputLayout.rootPlanes == 2) {
-            inputFormat = (srcV > srcU) ? VideoPixelFormat::NV12 : VideoPixelFormat::NV21;
-        }
-
-        if (inputFormat == mOutFormat) {
-            ALOGV("Zero-Copy is applied");
-            mGraphicBlocks.emplace_back(new BlockEntry(frameIndex));
-            return inputBlock;
-        }
 
         switch (convertMap(inputFormat, mOutFormat)) {
         case convertMap(VideoPixelFormat::YV12, VideoPixelFormat::I420):
@@ -253,14 +279,9 @@ C2ConstGraphicBlock FormatConverter::convertBlock(uint64_t frameIndex,
             ALOGE("Unsupported pixel format conversion from %s to %s",
                   videoPixelFormatToString(inputFormat).c_str(),
                   videoPixelFormatToString(mOutFormat).c_str());
-            *status = C2_CORRUPTED;
-            return inputBlock;  // This is actually redundant and should not be used.
+            return C2_CORRUPTED;
         }
     } else if (inputLayout.type == C2PlanarLayout::TYPE_RGB) {
-        // There is only RGBA_8888 specified in C2AllocationGralloc::map(), no BGRA_8888. Maybe
-        // BGRA_8888 is not used now?
-        inputFormat = VideoPixelFormat::ABGR;
-
         const uint8_t* srcRGB = (idMap) ? idMap->addr() : inputView.data()[C2PlanarLayout::PLANE_R];
         const int srcStrideRGB =
                 (idMap) ? idMap->rowInc() : inputLayout.planes[C2PlanarLayout::PLANE_R].rowInc;
@@ -287,20 +308,21 @@ C2ConstGraphicBlock FormatConverter::convertBlock(uint64_t frameIndex,
             ALOGE("Unsupported pixel format conversion from %s to %s",
                   videoPixelFormatToString(inputFormat).c_str(),
                   videoPixelFormatToString(mOutFormat).c_str());
-            *status = C2_CORRUPTED;
-            return inputBlock;  // This is actually redundant and should not be used.
+            return C2_CORRUPTED;
         }
     } else {
         ALOGE("Unsupported input layout type");
-        *status = C2_CORRUPTED;
-        return inputBlock;  // This is actually redundant and should not be used.
+        return C2_CORRUPTED;
     }
 
     ALOGV("convertBlock(frame_index=%" PRIu64 ", format=%s)", frameIndex,
           videoPixelFormatToString(inputFormat).c_str());
     entry->mAssociatedFrameIndex = frameIndex;
     mAvailableQueue.pop();
-    return outputBlock->share(C2Rect(mVisibleSize.width, mVisibleSize.height), C2Fence());
+
+    *convertedBlock =
+            outputBlock->share(C2Rect(mVisibleSize.width, mVisibleSize.height), C2Fence());
+    return C2_OK;
 }
 
 c2_status_t FormatConverter::returnBlock(uint64_t frameIndex) {
diff --git a/common/H264NalParser.cpp b/common/H264NalParser.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..d2949c50d9c464999ce615f427cfbbf015f50c1f
--- /dev/null
+++ b/common/H264NalParser.cpp
@@ -0,0 +1,227 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NalParser"
+
+#include <v4l2_codec2/common/H264NalParser.h>
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include <utils/Log.h>
+
+namespace android {
+
+namespace {
+
+enum H264ProfileIDC {
+    kProfileIDCAVLC444 = 44,
+    kProfileIDScalableBaseline = 83,
+    kProfileIDScalableHigh = 86,
+    kProfileIDCHigh = 100,
+    kProfileIDHigh10 = 110,
+    kProfileIDSMultiviewHigh = 118,
+    kProfileIDHigh422 = 122,
+    kProfileIDStereoHigh = 128,
+    kProfileIDHigh444Predictive = 244,
+};
+
+constexpr uint32_t kYUV444Idc = 3;
+
+// Skip a H.264 sequence scaling list in the specified bitstream.
+bool skipScalingList(ABitReader* br, size_t scalingListSize) {
+    size_t nextScale = 8;
+    size_t lastScale = 8;
+    for (size_t j = 0; j < scalingListSize; ++j) {
+        if (nextScale != 0) {
+            int32_t deltaScale;
+            if (!NalParser::parseSE(br, &deltaScale)) return false;  // delta_sl
+            if (deltaScale < -128) {
+                ALOGW("delta scale (%d) is below range, capping to -128", deltaScale);
+                deltaScale = -128;
+            } else if (deltaScale > 127) {
+                ALOGW("delta scale (%d) is above range, capping to 127", deltaScale);
+                deltaScale = 127;
+            }
+            nextScale = (lastScale + (deltaScale + 256)) % 256;
+        }
+        lastScale = (nextScale == 0) ? lastScale : nextScale;
+    }
+    return true;
+}
+
+// Skip the H.264 sequence scaling matrix in the specified bitstream.
+bool skipScalingMatrix(ABitReader* br, size_t numScalingLists) {
+    for (size_t i = 0; i < numScalingLists; ++i) {
+        uint32_t seq_scaling_list_present_flag;
+        if (!br->getBitsGraceful(1, &seq_scaling_list_present_flag))
+            return false;  // seq_scaling_list_present_flag
+        if (seq_scaling_list_present_flag) {
+            if (i < 6) {
+                if (!skipScalingList(br, 16)) return false;
+            } else {
+                if (!skipScalingList(br, 64)) return false;
+            }
+        }
+    }
+    return true;
+}
+
+}  // namespace
+
+H264NalParser::H264NalParser(const uint8_t* data, size_t length) : NalParser(data, length) {}
+
+bool H264NalParser::locateSPS() {
+    while (locateNextNal()) {
+        if (length() == 0) continue;
+        if (type() != kSPSType) continue;
+        return true;
+    }
+
+    return false;
+}
+
+bool H264NalParser::locateIDR() {
+    while (locateNextNal()) {
+        if (length() == 0) continue;
+        if (type() != kIDRType) continue;
+        return true;
+    }
+
+    return false;
+}
+
+uint8_t H264NalParser::type() const {
+    // First byte is forbidden_zero_bit (1) + nal_ref_idc (2) + nal_unit_type (5)
+    constexpr uint8_t kNALTypeMask = 0x1f;
+    return *mCurrNalDataPos & kNALTypeMask;
+}
+
+bool H264NalParser::findCodedColorAspects(ColorAspects* colorAspects) {
+    ALOG_ASSERT(colorAspects);
+    ALOG_ASSERT(type() == kSPSType);
+
+    // Unfortunately we can't directly jump to the Video Usability Information (VUI) parameters that
+    // contain the color aspects. We need to parse the entire SPS header up until the values we
+    // need.
+
+    // Skip first byte containing type.
+    NALBitReader br(mCurrNalDataPos + 1, length() - 1);
+
+    uint32_t unused;
+    uint32_t profileIDC;
+    if (!br.getBitsGraceful(8, &profileIDC)) return false;  // profile_idc
+    br.skipBits(16);        // constraint flags + reserved bits + level_idc
+    parseUE(&br, &unused);  // seq_parameter_set_id
+
+    if (profileIDC == kProfileIDCHigh || profileIDC == kProfileIDHigh10 ||
+        profileIDC == kProfileIDHigh422 || profileIDC == kProfileIDHigh444Predictive ||
+        profileIDC == kProfileIDCAVLC444 || profileIDC == kProfileIDScalableBaseline ||
+        profileIDC == kProfileIDScalableHigh || profileIDC == kProfileIDSMultiviewHigh ||
+        profileIDC == kProfileIDStereoHigh) {
+        uint32_t chromaFormatIDC;
+        if (!parseUE(&br, &chromaFormatIDC)) return false;
+        if (chromaFormatIDC == kYUV444Idc) {  // chroma_format_idc
+            br.skipBits(1);                   // separate_colour_plane_flag
+        }
+
+        parseUE(&br, &unused);  // bit_depth_luma_minus8
+        parseUE(&br, &unused);  // bit_depth_chroma_minus8
+        br.skipBits(1);         // lossless_qpprime_y_zero_flag
+
+        uint32_t seqScalingMatrixPresentFlag;
+        if (!br.getBitsGraceful(1, &seqScalingMatrixPresentFlag))
+            return false;  // seq_scaling_matrix_present_flag
+        if (seqScalingMatrixPresentFlag) {
+            const size_t numScalingLists = (chromaFormatIDC != kYUV444Idc) ? 8 : 12;
+            if (!skipScalingMatrix(&br, numScalingLists)) return false;
+        }
+    }
+
+    parseUE(&br, &unused);  // log2_max_frame_num_minus4
+    uint32_t pictureOrderCountType;
+    if (!parseUE(&br, &pictureOrderCountType)) return false;  // pic_order_cnt_type
+    if (pictureOrderCountType == 0) {
+        parseUE(&br, &unused);  // log2_max_pic_order_cnt_lsb_minus4
+    } else if (pictureOrderCountType == 1) {
+        br.skipBits(1);  // delta_pic_order_always_zero_flag
+        int32_t unused_i;
+        parseSE(&br, &unused_i);  // offset_for_non_ref_pic
+        parseSE(&br, &unused_i);  // offset_for_top_to_bottom_field
+        uint32_t numReferenceFrames;
+        if (!parseUE(&br, &numReferenceFrames))
+            return false;  // num_ref_frames_in_pic_order_cnt_cycle
+        for (uint32_t i = 0; i < numReferenceFrames; ++i) {
+            parseUE(&br, &unused);  // offset_for_ref_frame
+        }
+    }
+
+    parseUE(&br, &unused);  // num_ref_frames
+    br.skipBits(1);         // gaps_in_frame_num_value_allowed_flag
+    parseUE(&br, &unused);  // pic_width_in_mbs_minus1
+    parseUE(&br, &unused);  // pic_height_in_map_units_minus1
+    uint32_t frameMbsOnlyFlag;
+    if (!br.getBitsGraceful(1, &frameMbsOnlyFlag)) return false;  // frame_mbs_only_flag
+    if (!frameMbsOnlyFlag) {
+        br.skipBits(1);  // mb_adaptive_frame_field_flag
+    }
+    br.skipBits(1);  // direct_8x8_inference_flag
+
+    uint32_t frameCroppingFlag;
+    if (!br.getBitsGraceful(1, &frameCroppingFlag)) return false;  // frame_cropping_flag
+    if (frameCroppingFlag) {
+        parseUE(&br, &unused);  // frame_cropping_rect_left_offset
+        parseUE(&br, &unused);  // frame_cropping_rect_right_offset
+        parseUE(&br, &unused);  // frame_cropping_rect_top_offset
+        parseUE(&br, &unused);  // frame_cropping_rect_bottom_offset
+    }
+
+    uint32_t vuiParametersPresentFlag;
+    if (!br.getBitsGraceful(1, &vuiParametersPresentFlag))
+        return false;  // vui_parameters_present_flag
+    if (vuiParametersPresentFlag) {
+        uint32_t aspectRatioInfoPresentFlag;
+        if (!br.getBitsGraceful(1, &aspectRatioInfoPresentFlag))
+            return false;  // VUI aspect_ratio_info_present_flag
+        if (aspectRatioInfoPresentFlag) {
+            uint32_t aspectRatioIdc;
+            if (!br.getBitsGraceful(8, &aspectRatioIdc)) return false;  // VUI aspect_ratio_idc
+            if (aspectRatioIdc == 255) {  // VUI aspect_ratio_idc == extended sample aspect ratio
+                br.skipBits(32);          // VUI sar_width + sar_height
+            }
+        }
+
+        uint32_t overscanInfoPresentFlag;
+        if (!br.getBitsGraceful(1, &overscanInfoPresentFlag))
+            return false;  // VUI overscan_info_present_flag
+        if (overscanInfoPresentFlag) {
+            br.skipBits(1);  // VUI overscan_appropriate_flag
+        }
+        uint32_t videoSignalTypePresentFlag;
+        if (!br.getBitsGraceful(1, &videoSignalTypePresentFlag))
+            return false;  // VUI video_signal_type_present_flag
+        if (videoSignalTypePresentFlag) {
+            br.skipBits(3);  // VUI video_format
+            uint32_t videoFullRangeFlag;
+            if (!br.getBitsGraceful(1, &videoFullRangeFlag))
+                return false;  // VUI videoFullRangeFlag
+            colorAspects->fullRange = videoFullRangeFlag;
+            uint32_t color_description_present_flag;
+            if (!br.getBitsGraceful(1, &color_description_present_flag))
+                return false;  // VUI color_description_present_flag
+            if (color_description_present_flag) {
+                if (!br.getBitsGraceful(8, &colorAspects->primaries))
+                    return false;  // VUI colour_primaries
+                if (!br.getBitsGraceful(8, &colorAspects->transfer))
+                    return false;  // VUI transfer_characteristics
+                if (!br.getBitsGraceful(8, &colorAspects->coeffs))
+                    return false;  // VUI matrix_coefficients
+                return true;
+            }
+        }
+    }
+
+    return false;  // The NAL unit doesn't contain color aspects info.
+}
+
+}  // namespace android
diff --git a/common/HEVCNalParser.cpp b/common/HEVCNalParser.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..89c67f7e06ba50b9a80e5ffb07cbe00a8d0fba96
--- /dev/null
+++ b/common/HEVCNalParser.cpp
@@ -0,0 +1,366 @@
+// Copyright 2022 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HEVCNalParser"
+
+#include <v4l2_codec2/common/HEVCNalParser.h>
+
+#include <algorithm>
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include <utils/Log.h>
+
+namespace android {
+
+namespace {
+
+constexpr uint32_t kMaxShortTermRefPicSets = 64;
+
+struct StRefPicSet {
+    // Syntax elements.
+    int numNegativePics;
+    int numPositivePics;
+    int deltaPocS0[kMaxShortTermRefPicSets];
+    int deltaPocS1[kMaxShortTermRefPicSets];
+
+    // Calculated fields.
+    int numDeltaPocs;
+};
+
+// Skip an HEVC ProfileTierLevel in the specified bitstream.
+bool skipProfileTierLevel(ABitReader* br, uint32_t spsMaxSublayersMinus1) {
+    // general_profile_space(2), general_tier_flag(1), general_profile_idc(5),
+    // general_profile_compatibility_flag(32), general_progressive_source_flag(1),
+    // general_interlaced_source_flag(1), general_non_packed_constraint_flag(1),
+    // general_frame_only_constraint_flag(1), compatibility_flags(43), general_inbld_flag(1),
+    // general_level_idc(8)
+    br->skipBits(96);
+    if (spsMaxSublayersMinus1 > 6) return false;
+    uint32_t subLayerProfilePresentFlag[6];
+    uint32_t subLayerLevelPresentFlag[6];
+    for (uint32_t i = 0; i < spsMaxSublayersMinus1; ++i) {
+        if (!br->getBitsGraceful(1, &subLayerProfilePresentFlag[i])) return false;
+        if (!br->getBitsGraceful(1, &subLayerLevelPresentFlag[i])) return false;
+    }
+    if (spsMaxSublayersMinus1 > 0) {
+        br->skipBits(2 * (8 - spsMaxSublayersMinus1));
+    }
+    for (uint32_t i = 0; i < spsMaxSublayersMinus1; ++i) {
+        if (subLayerProfilePresentFlag[i]) {
+            // sub_layer_profile_space(2), sub_layer_tier_flag(1), sub_layer_profile_idc(5),
+            // sub_layer_profile_compatibility_flag(32),  sub_layer_progressive_source_flag(1),
+            // sub_layer_interlaced_source_flag(1), sub_layer_non_packed_constraint_flag(1),
+            // sub_layer_frame_only_constraint_flag(1), compatibility_flags(43),
+            // sub_layer_inbld_flag(1)
+            br->skipBits(88);
+        }
+        if (subLayerLevelPresentFlag[i]) {
+            br->skipBits(8);  // sub_layer_level_idc
+        }
+    }
+    return true;
+}
+
+// Skip an HEVC ScalingListData in the specified bitstream.
+bool skipScalingListData(ABitReader* br) {
+    for (int sizeId = 0; sizeId < 4; ++sizeId) {
+        for (int matrixId = 0; matrixId < 6; matrixId += (sizeId == 3) ? 3 : 1) {
+            uint32_t scalingListPredModeFlag;
+            if (!br->getBitsGraceful(1, &scalingListPredModeFlag)) return false;
+            if (!scalingListPredModeFlag) {
+                uint32_t unused;
+                NalParser::parseUE(br, &unused);  // scaling_list_pred_matrix_id_delta
+            } else {
+                int32_t unused;
+                if (sizeId > 1)
+                    NalParser::parseSE(br, &unused);  // scaling_list_dc_coef_16x16 or 32x32
+                int coefNum = std::min(64, (1 << (4 + (sizeId << 1))));
+                for (int i = 0; i < coefNum; ++i)
+                    NalParser::parseSE(br, &unused);  // scaling_list_delta_coef
+            }
+        }
+    }
+    return true;
+}
+
+// Skip an HEVC StRefPicSet in the specified bitstream.
+bool skipStRefPicSet(ABitReader* br, uint32_t stRpsIdx, uint32_t numShortTermRefPicSets,
+                     StRefPicSet* allRefPicSets, StRefPicSet* currRefPicSet) {
+    uint32_t interRefPicSetPredictionFlag = 0;
+    if (stRpsIdx != 0) {
+        if (!br->getBitsGraceful(1, &interRefPicSetPredictionFlag)) return false;
+    }
+    if (interRefPicSetPredictionFlag) {
+        uint32_t deltaIdxMinus1 = 0;
+        if (stRpsIdx == numShortTermRefPicSets) {
+            if (!NalParser::parseUE(br, &deltaIdxMinus1)) return false;
+            if (deltaIdxMinus1 + 1 > stRpsIdx) {
+                ALOGW("deltaIdxMinus1 is out of range");
+                return false;
+            }
+        }
+        int refRpsIdx = stRpsIdx - (static_cast<int>(deltaIdxMinus1) + 1);
+        uint32_t deltaRpsSign;
+        uint32_t absDeltaRpsMinus1;
+        if (!br->getBitsGraceful(1, &deltaRpsSign)) return false;
+        if (!NalParser::parseUE(br, &absDeltaRpsMinus1)) return false;
+        int deltaRps = (1 - 2 * static_cast<int>(deltaRpsSign)) *
+                       (static_cast<int>(absDeltaRpsMinus1) + 1);
+        const StRefPicSet& refSet = allRefPicSets[refRpsIdx];
+        uint32_t useDeltaFlag[kMaxShortTermRefPicSets];
+        // useDeltaFlag defaults to 1 if not present.
+        std::fill_n(useDeltaFlag, kMaxShortTermRefPicSets, 1);
+
+        for (int j = 0; j <= refSet.numDeltaPocs; j++) {
+            uint32_t usedByCurrPicFlag;
+            if (!br->getBitsGraceful(1, &usedByCurrPicFlag)) return false;
+            if (!usedByCurrPicFlag)
+                if (!br->getBitsGraceful(1, &useDeltaFlag[j])) return false;
+        }
+        int i = 0;
+        for (int j = refSet.numPositivePics - 1; j >= 0; --j) {
+            int dPoc = refSet.deltaPocS1[j] + deltaRps;
+            if (dPoc < 0 && useDeltaFlag[refSet.numNegativePics + j])
+                currRefPicSet->deltaPocS0[i++] = dPoc;
+        }
+        if (deltaRps < 0 && useDeltaFlag[refSet.numDeltaPocs]) {
+            currRefPicSet->deltaPocS0[i++] = deltaRps;
+        }
+        for (int j = 0; j < refSet.numNegativePics; ++j) {
+            int dPoc = refSet.deltaPocS0[j] + deltaRps;
+            if (dPoc < 0 && useDeltaFlag[j]) currRefPicSet->deltaPocS0[i++] = dPoc;
+        }
+        currRefPicSet->numNegativePics = i;
+        i = 0;
+        for (int j = refSet.numNegativePics - 1; j >= 0; --j) {
+            int dPoc = refSet.deltaPocS0[j] + deltaRps;
+            if (dPoc > 0 && useDeltaFlag[j]) currRefPicSet->deltaPocS1[i++] = dPoc;
+        }
+        if (deltaRps > 0 && useDeltaFlag[refSet.numDeltaPocs])
+            currRefPicSet->deltaPocS1[i++] = deltaRps;
+        for (int j = 0; j < refSet.numPositivePics; ++j) {
+            int dPoc = refSet.deltaPocS1[j] + deltaRps;
+            if (dPoc > 0 && useDeltaFlag[refSet.numNegativePics + j])
+                currRefPicSet->deltaPocS1[i++] = dPoc;
+        }
+        currRefPicSet->numPositivePics = i;
+    } else {
+        uint32_t uintForRead;
+        if (!NalParser::parseUE(br, &uintForRead)) return false;
+        currRefPicSet->numNegativePics = static_cast<int>(uintForRead);
+        if (!NalParser::parseUE(br, &uintForRead)) return false;
+        currRefPicSet->numPositivePics = static_cast<int>(uintForRead);
+        if (currRefPicSet->numNegativePics > kMaxShortTermRefPicSets ||
+            currRefPicSet->numPositivePics > kMaxShortTermRefPicSets) {
+            ALOGW("num_negative_pics or num_positive_pics is out of range");
+            return false;
+        }
+        for (int i = 0; i < currRefPicSet->numNegativePics; ++i) {
+            uint32_t deltaPocS0Minus1;
+            if (!NalParser::parseUE(br, &deltaPocS0Minus1)) return false;
+            if (i == 0) {
+                currRefPicSet->deltaPocS0[i] = -(static_cast<int>(deltaPocS0Minus1) + 1);
+            } else {
+                currRefPicSet->deltaPocS0[i] =
+                        currRefPicSet->deltaPocS0[i - 1] - (static_cast<int>(deltaPocS0Minus1) + 1);
+            }
+            br->skipBits(1);  // used_by_curr_pic_s0
+        }
+        for (int i = 0; i < currRefPicSet->numPositivePics; ++i) {
+            uint32_t deltaPocS1Minus1;
+            if (!NalParser::parseUE(br, &deltaPocS1Minus1)) return false;
+            if (i == 0) {
+                currRefPicSet->deltaPocS1[i] = static_cast<int>(deltaPocS1Minus1) + 1;
+            } else {
+                currRefPicSet->deltaPocS1[i] =
+                        currRefPicSet->deltaPocS1[i - 1] + static_cast<int>(deltaPocS1Minus1) + 1;
+            }
+            br->skipBits(1);  // used_by_curr_pic_s1
+        }
+    }
+    currRefPicSet->numDeltaPocs = currRefPicSet->numNegativePics + currRefPicSet->numPositivePics;
+    if (currRefPicSet->numDeltaPocs > kMaxShortTermRefPicSets) {
+        ALOGW("numDeltaPocs is out of range");
+        return false;
+    }
+    return true;
+}
+
+}  // namespace
+
+HEVCNalParser::HEVCNalParser(const uint8_t* data, size_t length) : NalParser(data, length) {}
+
+bool HEVCNalParser::locateSPS() {
+    while (locateNextNal()) {
+        if (length() == 0) continue;
+        if (type() != kSPSType) continue;
+        return true;
+    }
+
+    return false;
+}
+
+bool HEVCNalParser::locateIDR() {
+    while (locateNextNal()) {
+        if (length() == 0) continue;
+        if (type() != kIDRType) continue;
+        return true;
+    }
+
+    return false;
+}
+
+uint8_t HEVCNalParser::type() const {
+    // First bit is forbidden_zero_bit, next 6 are nal_unit_type
+    constexpr uint8_t kNALTypeMask = 0x7e;
+    return (*mCurrNalDataPos & kNALTypeMask) >> 1;
+}
+
+bool HEVCNalParser::findCodedColorAspects(ColorAspects* colorAspects) {
+    ALOG_ASSERT(colorAspects);
+    ALOG_ASSERT(type() == kSPSType);
+
+    // Unfortunately we can't directly jump to the Video Usability Information (VUI) parameters that
+    // contain the color aspects. We need to parse the entire SPS header up until the values we
+    // need.
+    // Skip first 2 bytes for the NALU header.
+    if (length() <= 2) return false;
+    NALBitReader br(mCurrNalDataPos + 2, length() - 2);
+
+    br.skipBits(4);  // sps_video_parameter_set_id
+    uint32_t spsMaxSublayersMinus1;
+    if (!br.getBitsGraceful(3, &spsMaxSublayersMinus1)) return false;
+    br.skipBits(1);  // sps_temporal_id_nesting_flag
+
+    if (!skipProfileTierLevel(&br, spsMaxSublayersMinus1)) return false;
+
+    uint32_t unused;
+    parseUE(&br, &unused);  // sps_seq_parameter_set_id
+    uint32_t chromaFormatIdc;
+    if (!parseUE(&br, &chromaFormatIdc)) return false;
+    if (chromaFormatIdc == 3) br.skipBits(1);  // separate_colour_plane_flag
+    parseUE(&br, &unused);                     // pic_width_in_luma_samples
+    parseUE(&br, &unused);                     // pic_height_in_luma_samples
+
+    uint32_t conformanceWindowFlag;
+    if (!br.getBitsGraceful(1, &conformanceWindowFlag)) return false;
+    if (conformanceWindowFlag) {
+        parseUE(&br, &unused);  // conf_win_left_offset
+        parseUE(&br, &unused);  // conf_win_right_offset
+        parseUE(&br, &unused);  // conf_win_top_offset
+        parseUE(&br, &unused);  // conf_win_bottom_offset
+    }
+    parseUE(&br, &unused);  // bit_depth_luma_minus8
+    parseUE(&br, &unused);  // bit_depth_chroma_minus8
+    uint32_t log2MaxPicOrderCntLsbMinus4;
+    if (!parseUE(&br, &log2MaxPicOrderCntLsbMinus4)) return false;
+
+    uint32_t spsSubLayerOrderingInfoPresentFlag;
+    if (!br.getBitsGraceful(1, &spsSubLayerOrderingInfoPresentFlag)) return false;
+    for (uint32_t i = spsSubLayerOrderingInfoPresentFlag ? 0 : spsMaxSublayersMinus1;
+         i <= spsMaxSublayersMinus1; ++i) {
+        parseUE(&br, &unused);  // sps_max_dec_pic_buffering_minus1
+        parseUE(&br, &unused);  // sps_max_num_reorder_pics
+        parseUE(&br, &unused);  // sps_max_latency_increase_plus1
+    }
+    parseUE(&br, &unused);  // log2_min_luma_coding_block_size_minus3
+    parseUE(&br, &unused);  // log2_diff_max_min_luma_coding_block_size
+    parseUE(&br, &unused);  // log2_min_luma_transform_block_size_minus2
+    parseUE(&br, &unused);  // log2_diff_max_min_luma_transform_block_size
+    parseUE(&br, &unused);  // max_transform_hierarchy_depth_inter
+    parseUE(&br, &unused);  // max_transform_hierarchy_depth_intra
+    uint32_t scalingListEnabledFlag;
+    if (!br.getBitsGraceful(1, &scalingListEnabledFlag)) return false;
+    if (scalingListEnabledFlag) {
+        uint32_t spsScalingListDataPresentFlag;
+        if (!br.getBitsGraceful(1, &spsScalingListDataPresentFlag)) return false;
+        if (spsScalingListDataPresentFlag) {
+            if (!skipScalingListData(&br)) return false;
+        }
+    }
+
+    br.skipBits(2);  // amp_enabled_flag(1), sample_adaptive_offset_enabled_flag(1)
+    uint32_t pcmEnabledFlag;
+    if (!br.getBitsGraceful(1, &pcmEnabledFlag)) return false;
+    if (pcmEnabledFlag) {
+        // pcm_sample_bit_depth_luma_minus1(4), pcm_sample_bit_depth_chroma_minus1(4)
+        br.skipBits(8);
+        parseUE(&br, &unused);  // log2_min_pcm_luma_coding_block_size_minus3
+        parseUE(&br, &unused);  // log2_diff_max_min_pcm_luma_coding_block_size
+        br.skipBits(1);         // pcm_loop_filter_disabled_flag
+    }
+
+    uint32_t numShortTermRefPicSets;
+    if (!parseUE(&br, &numShortTermRefPicSets)) return false;
+    if (numShortTermRefPicSets > kMaxShortTermRefPicSets) {
+        ALOGW("numShortTermRefPicSets out of range");
+        return false;
+    }
+    StRefPicSet allRefPicSets[kMaxShortTermRefPicSets];
+    memset(allRefPicSets, 0, sizeof(StRefPicSet) * kMaxShortTermRefPicSets);
+    for (uint32_t i = 0; i < numShortTermRefPicSets; ++i) {
+        if (!skipStRefPicSet(&br, i, numShortTermRefPicSets, allRefPicSets, &allRefPicSets[i]))
+            return false;
+    }
+
+    uint32_t longTermRefPicsPresentFlag;
+    if (!br.getBitsGraceful(1, &longTermRefPicsPresentFlag)) return false;
+    if (longTermRefPicsPresentFlag) {
+        uint32_t numLongTermRefPicsSps;
+        if (!parseUE(&br, &numLongTermRefPicsSps)) return false;
+        for (uint32_t i = 0; i < numLongTermRefPicsSps; ++i) {
+            // lt_ref_pic_poc_lsb_sps
+            if (!br.getBitsGraceful(log2MaxPicOrderCntLsbMinus4 + 4, &unused)) return false;
+            if (!br.getBitsGraceful(1, &unused)) return false;  // used_by_curr_pic_lt_sps_flag
+        }
+    }
+    // sps_temporal_mvp_enabled_flag(1), strong_intra_smoothing_enabled_flag(1)
+    br.skipBits(2);
+    uint32_t vuiParametersPresentFlag;
+    if (!br.getBitsGraceful(1, &vuiParametersPresentFlag)) return false;
+    if (vuiParametersPresentFlag) {
+        uint32_t aspectRatioInfoPresentFlag;
+        if (!br.getBitsGraceful(1, &aspectRatioInfoPresentFlag))
+            return false;  // VUI aspect_ratio_info_present_flag
+        if (aspectRatioInfoPresentFlag) {
+            uint32_t aspectRatioIdc;
+            if (!br.getBitsGraceful(8, &aspectRatioIdc)) return false;  // VUI aspect_ratio_idc
+            if (aspectRatioIdc == 255) {  // VUI aspect_ratio_idc == extended sample aspect ratio
+                br.skipBits(32);          // VUI sar_width + sar_height
+            }
+        }
+
+        uint32_t overscanInfoPresentFlag;
+        if (!br.getBitsGraceful(1, &overscanInfoPresentFlag))
+            return false;                             // VUI overscan_info_present_flag
+        if (overscanInfoPresentFlag) br.skipBits(1);  // VUI overscan_appropriate_flag
+        uint32_t videoSignalTypePresentFlag;
+        if (!br.getBitsGraceful(1, &videoSignalTypePresentFlag))
+            return false;  // VUI video_signal_type_present_flag
+        if (videoSignalTypePresentFlag) {
+            br.skipBits(3);  // VUI video_format
+            uint32_t videoFullRangeFlag;
+            if (!br.getBitsGraceful(1, &videoFullRangeFlag))
+                return false;  // VUI video_full_range_flag
+            colorAspects->fullRange = videoFullRangeFlag;
+            uint32_t color_description_present_flag;
+            if (!br.getBitsGraceful(1, &color_description_present_flag))
+                return false;  // VUI color_description_present_flag
+            if (color_description_present_flag) {
+                if (!br.getBitsGraceful(8, &colorAspects->primaries))
+                    return false;  // VUI colour_primaries
+                if (!br.getBitsGraceful(8, &colorAspects->transfer))
+                    return false;  // VUI transfer_characteristics
+                if (!br.getBitsGraceful(8, &colorAspects->coeffs))
+                    return false;  // VUI matrix_coefficients
+                return true;
+            }
+        }
+    }
+
+    return false;  // The NAL unit doesn't contain color aspects info.
+}
+
+}  // namespace android
diff --git a/common/NalParser.cpp b/common/NalParser.cpp
index 1df95d4e62899bb093fbe96125b6699e0c56e047..60ed83657ede972b5ae60b46914f3058f8c335da 100644
--- a/common/NalParser.cpp
+++ b/common/NalParser.cpp
@@ -5,94 +5,14 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NalParser"
 
-#include <v4l2_codec2/common/NalParser.h>
-
 #include <algorithm>
 
+#include <v4l2_codec2/common/NalParser.h>
+
 #include <media/stagefright/foundation/ABitReader.h>
-#include <utils/Log.h>
 
 namespace android {
 
-namespace {
-
-enum H264ProfileIDC {
-    kProfileIDCAVLC444 = 44,
-    kProfileIDScalableBaseline = 83,
-    kProfileIDScalableHigh = 86,
-    kProfileIDCHigh = 100,
-    kProfileIDHigh10 = 110,
-    kProfileIDSMultiviewHigh = 118,
-    kProfileIDHigh422 = 122,
-    kProfileIDStereoHigh = 128,
-    kProfileIDHigh444Predictive = 244,
-};
-
-constexpr uint32_t kYUV444Idc = 3;
-
-// Read unsigned int encoded with exponential-golomb.
-bool parseUE(ABitReader* br, uint32_t* val) {
-    uint32_t numZeroes = 0;
-    uint32_t bit;
-    if (!br->getBitsGraceful(1, &bit)) return false;
-    while (bit == 0) {
-        ++numZeroes;
-        if (!br->getBitsGraceful(1, &bit)) return false;
-    }
-    if (!br->getBitsGraceful(numZeroes, val)) return false;
-    *val += (1u << numZeroes) - 1;
-    return true;
-}
-
-// Read signed int encoded with exponential-golomb.
-bool parseSE(ABitReader* br, int32_t* val) {
-    uint32_t codeNum;
-    if (!parseUE(br, &codeNum)) return false;
-    *val = (codeNum & 1) ? (codeNum + 1) >> 1 : -static_cast<int32_t>(codeNum >> 1);
-    return true;
-}
-
-// Skip a H.264 sequence scaling list in the specified bitstream.
-bool skipScalingList(ABitReader* br, size_t scalingListSize) {
-    size_t nextScale = 8;
-    size_t lastScale = 8;
-    for (size_t j = 0; j < scalingListSize; ++j) {
-        if (nextScale != 0) {
-            int32_t deltaScale;
-            if (!parseSE(br, &deltaScale)) return false;  // delta_sl
-            if (deltaScale < -128) {
-                ALOGW("delta scale (%d) is below range, capping to -128", deltaScale);
-                deltaScale = -128;
-            } else if (deltaScale > 127) {
-                ALOGW("delta scale (%d) is above range, capping to 127", deltaScale);
-                deltaScale = 127;
-            }
-            nextScale = (lastScale + (deltaScale + 256)) % 256;
-        }
-        lastScale = (nextScale == 0) ? lastScale : nextScale;
-    }
-    return true;
-}
-
-// Skip the H.264 sequence scaling matrix in the specified bitstream.
-bool skipScalingMatrix(ABitReader* br, size_t numScalingLists) {
-    for (size_t i = 0; i < numScalingLists; ++i) {
-        uint32_t seq_scaling_list_present_flag;
-        if (!br->getBitsGraceful(1, &seq_scaling_list_present_flag))
-            return false;  // seq_scaling_list_present_flag
-        if (seq_scaling_list_present_flag) {
-            if (i < 6) {
-                if (!skipScalingList(br, 16)) return false;
-            } else {
-                if (!skipScalingList(br, 64)) return false;
-            }
-        }
-    }
-    return true;
-}
-
-}  // namespace
-
 NalParser::NalParser(const uint8_t* data, size_t length)
       : mCurrNalDataPos(data), mDataEnd(data + length) {
     mNextNalStartCodePos = findNextStartCodePos();
@@ -105,16 +25,6 @@ bool NalParser::locateNextNal() {
     return true;
 }
 
-bool NalParser::locateSPS() {
-    while (locateNextNal()) {
-        if (length() == 0) continue;
-        if (type() != kSPSType) continue;
-        return true;
-    }
-
-    return false;
-}
-
 const uint8_t* NalParser::data() const {
     return mCurrNalDataPos;
 }
@@ -126,142 +36,31 @@ size_t NalParser::length() const {
     return *(mNextNalStartCodePos - 1) == 0x00 ? length - 1 : length;
 }
 
-uint8_t NalParser::type() const {
-    // First byte is forbidden_zero_bit (1) + nal_ref_idc (2) + nal_unit_type (5)
-    constexpr uint8_t kNALTypeMask = 0x1f;
-    return *mCurrNalDataPos & kNALTypeMask;
-}
-
 const uint8_t* NalParser::findNextStartCodePos() const {
     return std::search(mCurrNalDataPos, mDataEnd, kNalStartCode,
                        kNalStartCode + kNalStartCodeLength);
 }
 
-bool NalParser::findCodedColorAspects(ColorAspects* colorAspects) {
-    ALOG_ASSERT(colorAspects);
-    ALOG_ASSERT(type() == kSPSType);
-
-    // Unfortunately we can't directly jump to the Video Usability Information (VUI) parameters that
-    // contain the color aspects. We need to parse the entire SPS header up until the values we
-    // need.
-
-    // Skip first byte containing type.
-    ABitReader br(mCurrNalDataPos + 1, length() - 1);
-
-    uint32_t unused;
-    uint32_t profileIDC;
-    if (!br.getBitsGraceful(8, &profileIDC)) return false;  // profile_idc
-    br.skipBits(16);        // constraint flags + reserved bits + level_idc
-    parseUE(&br, &unused);  // seq_parameter_set_id
-
-    if (profileIDC == kProfileIDCHigh || profileIDC == kProfileIDHigh10 ||
-        profileIDC == kProfileIDHigh422 || profileIDC == kProfileIDHigh444Predictive ||
-        profileIDC == kProfileIDCAVLC444 || profileIDC == kProfileIDScalableBaseline ||
-        profileIDC == kProfileIDScalableHigh || profileIDC == kProfileIDSMultiviewHigh ||
-        profileIDC == kProfileIDStereoHigh) {
-        uint32_t chromaFormatIDC;
-        if (!parseUE(&br, &chromaFormatIDC)) return false;
-        if (chromaFormatIDC == kYUV444Idc) {  // chroma_format_idc
-            br.skipBits(1);                   // separate_colour_plane_flag
-        }
-
-        parseUE(&br, &unused);  // bit_depth_luma_minus8
-        parseUE(&br, &unused);  // bit_depth_chroma_minus8
-        br.skipBits(1);         // lossless_qpprime_y_zero_flag
-
-        uint32_t seqScalingMatrixPresentFlag;
-        if (!br.getBitsGraceful(1, &seqScalingMatrixPresentFlag))
-            return false;  // seq_scaling_matrix_present_flag
-        if (seqScalingMatrixPresentFlag) {
-            const size_t numScalingLists = (chromaFormatIDC != kYUV444Idc) ? 8 : 12;
-            if (!skipScalingMatrix(&br, numScalingLists)) return false;
-        }
-    }
-
-    parseUE(&br, &unused);  // log2_max_frame_num_minus4
-    uint32_t pictureOrderCountType;
-    if (!parseUE(&br, &pictureOrderCountType)) return false;  // pic_order_cnt_type
-    if (pictureOrderCountType == 0) {
-        parseUE(&br, &unused);  // log2_max_pic_order_cnt_lsb_minus4
-    } else if (pictureOrderCountType == 1) {
-        br.skipBits(1);  // delta_pic_order_always_zero_flag
-        int32_t unused_i;
-        parseSE(&br, &unused_i);  // offset_for_non_ref_pic
-        parseSE(&br, &unused_i);  // offset_for_top_to_bottom_field
-        uint32_t numReferenceFrames;
-        if (!parseUE(&br, &numReferenceFrames))
-            return false;  // num_ref_frames_in_pic_order_cnt_cycle
-        for (uint32_t i = 0; i < numReferenceFrames; ++i) {
-            parseUE(&br, &unused);  // offset_for_ref_frame
-        }
-    }
-
-    parseUE(&br, &unused);  // num_ref_frames
-    br.skipBits(1);         // gaps_in_frame_num_value_allowed_flag
-    parseUE(&br, &unused);  // pic_width_in_mbs_minus1
-    parseUE(&br, &unused);  // pic_height_in_map_units_minus1
-    uint32_t frameMbsOnlyFlag;
-    if (!br.getBitsGraceful(1, &frameMbsOnlyFlag)) return false;  // frame_mbs_only_flag
-    if (!frameMbsOnlyFlag) {
-        br.skipBits(1);  // mb_adaptive_frame_field_flag
-    }
-    br.skipBits(1);  // direct_8x8_inference_flag
-
-    uint32_t frameCroppingFlag;
-    if (!br.getBitsGraceful(1, &frameCroppingFlag)) return false;  // frame_cropping_flag
-    if (frameCroppingFlag) {
-        parseUE(&br, &unused);  // frame_cropping_rect_left_offset
-        parseUE(&br, &unused);  // frame_cropping_rect_right_offset
-        parseUE(&br, &unused);  // frame_cropping_rect_top_offset
-        parseUE(&br, &unused);  // frame_cropping_rect_bottom_offset
-    }
-
-    uint32_t vuiParametersPresentFlag;
-    if (!br.getBitsGraceful(1, &vuiParametersPresentFlag))
-        return false;  // vui_parameters_present_flag
-    if (vuiParametersPresentFlag) {
-        uint32_t aspectRatioInfoPresentFlag;
-        if (!br.getBitsGraceful(1, &aspectRatioInfoPresentFlag))
-            return false;  // VUI aspect_ratio_info_present_flag
-        if (aspectRatioInfoPresentFlag) {
-            uint32_t aspectRatioIdc;
-            if (!br.getBitsGraceful(8, &aspectRatioIdc)) return false;  // VUI aspect_ratio_idc
-            if (aspectRatioIdc == 255) {  // VUI aspect_ratio_idc == extended sample aspect ratio
-                br.skipBits(32);          // VUI sar_width + sar_height
-            }
-        }
-
-        uint32_t overscanInfoPresentFlag;
-        if (!br.getBitsGraceful(1, &overscanInfoPresentFlag))
-            return false;  // VUI overscan_info_present_flag
-        if (overscanInfoPresentFlag) {
-            br.skipBits(1);  // VUI overscan_appropriate_flag
-        }
-        uint32_t videoSignalTypePresentFlag;
-        if (!br.getBitsGraceful(1, &videoSignalTypePresentFlag))
-            return false;  // VUI video_signal_type_present_flag
-        if (videoSignalTypePresentFlag) {
-            br.skipBits(3);  // VUI video_format
-            uint32_t videoFullRangeFlag;
-            if (!br.getBitsGraceful(1, &videoFullRangeFlag))
-                return false;  // VUI videoFullRangeFlag
-            colorAspects->fullRange = videoFullRangeFlag;
-            uint32_t color_description_present_flag;
-            if (!br.getBitsGraceful(1, &color_description_present_flag))
-                return false;  // VUI color_description_present_flag
-            if (color_description_present_flag) {
-                if (!br.getBitsGraceful(8, &colorAspects->primaries))
-                    return false;  // VUI colour_primaries
-                if (!br.getBitsGraceful(8, &colorAspects->transfer))
-                    return false;  // VUI transfer_characteristics
-                if (!br.getBitsGraceful(8, &colorAspects->coeffs))
-                    return false;  // VUI matrix_coefficients
-                return true;
-            }
-        }
+// Read unsigned int encoded with exponential-golomb.
+bool NalParser::parseUE(ABitReader* br, uint32_t* val) {
+    uint32_t numZeroes = 0;
+    uint32_t bit;
+    if (!br->getBitsGraceful(1, &bit)) return false;
+    while (bit == 0) {
+        ++numZeroes;
+        if (!br->getBitsGraceful(1, &bit)) return false;
     }
+    if (!br->getBitsGraceful(numZeroes, val)) return false;
+    *val += (1u << numZeroes) - 1;
+    return true;
+}
 
-    return false;  // The NAL unit doesn't contain color aspects info.
+// Read signed int encoded with exponential-golomb.
+bool NalParser::parseSE(ABitReader* br, int32_t* val) {
+    uint32_t codeNum;
+    if (!parseUE(br, &codeNum)) return false;
+    *val = (codeNum & 1) ? (codeNum + 1) >> 1 : -static_cast<int32_t>(codeNum >> 1);
+    return true;
 }
 
 }  // namespace android
diff --git a/common/VideoTypes.cpp b/common/VideoTypes.cpp
index 18ebfc9e4eba2ff77253c06bc181cba859c704f9..61feb06ac6c74c2cdec9a69f0b10e1e6cfd2cef9 100644
--- a/common/VideoTypes.cpp
+++ b/common/VideoTypes.cpp
@@ -30,6 +30,8 @@ const char* profileToString(C2Config::profile_t profile) {
         return "unused";
     case C2Config::PROFILE_AVC_BASELINE:
         return "h264 baseline";
+    case C2Config::PROFILE_AVC_CONSTRAINED_BASELINE:
+        return "h264 constrained baseline";
     case C2Config::PROFILE_AVC_MAIN:
         return "h264 main";
     case C2Config::PROFILE_AVC_EXTENDED:
diff --git a/common/include/v4l2_codec2/common/Common.h b/common/include/v4l2_codec2/common/Common.h
index 0775af1941de5f1ff73ba87dde846c411b511ac6..ac55bd2189d1a7f98d59b827192311b0a6f3d800 100644
--- a/common/include/v4l2_codec2/common/Common.h
+++ b/common/include/v4l2_codec2/common/Common.h
@@ -15,6 +15,7 @@
 #include <ui/Size.h>
 
 #include <v4l2_codec2/common/VideoPixelFormat.h>
+#include <v4l2_codec2/common/VideoTypes.h>
 
 namespace android {
 
@@ -33,6 +34,26 @@ struct VideoFrameLayout {
     bool mMultiPlanar = false;
 };
 
+// Specification of an encoding profile supported by an encoder or decoder.
+struct SupportedProfile {
+    C2Config::profile_t profile = C2Config::PROFILE_UNUSED;
+    ui::Size min_resolution;
+    ui::Size max_resolution;
+    uint32_t max_framerate_numerator = 0;
+    uint32_t max_framerate_denominator = 0;
+    bool encrypted_only = false;
+};
+using SupportedProfiles = std::vector<SupportedProfile>;
+
+// Contains the capabilities of the decoder or encoder.
+struct SupportedCapabilities {
+    VideoCodec codec;
+    SupportedProfiles supportedProfiles;
+    C2Config::profile_t defaultProfile = C2Config::PROFILE_UNUSED;
+    std::vector<C2Config::level_t> supportedLevels;
+    C2Config::level_t defaultLevel = C2Config::LEVEL_UNUSED;
+};
+
 // Check whether |rect1| completely contains |rect2|.
 bool contains(const Rect& rect1, const Rect& rect2);
 
@@ -48,6 +69,9 @@ bool isEmpty(const ui::Size& size);
 // Convert the specified |size| to a string.
 std::string toString(const ui::Size& size);
 
+// Check whether the specified profile can be used with the specified codec.
+bool isValidProfileForCodec(VideoCodec codec, C2Config::profile_t profile);
+
 }  // namespace android
 
 #endif  // ANDROID_V4L2_CODEC2_COMMON_COMMON_H
diff --git a/common/include/v4l2_codec2/common/FormatConverter.h b/common/include/v4l2_codec2/common/FormatConverter.h
index bc3f85acda9eb4b19bb3da4d16ecabae84828f31..b4aaccacc3cd62bb12d20d0d55cca618a10ad6a4 100644
--- a/common/include/v4l2_codec2/common/FormatConverter.h
+++ b/common/include/v4l2_codec2/common/FormatConverter.h
@@ -28,7 +28,7 @@ public:
     ~ImplDefinedToRGBXMap();
     ImplDefinedToRGBXMap() = delete;
 
-    static std::unique_ptr<ImplDefinedToRGBXMap> Create(const C2ConstGraphicBlock& block);
+    static std::unique_ptr<ImplDefinedToRGBXMap> create(const C2ConstGraphicBlock& block);
 
     const uint8_t* addr() const { return mAddr; }
     int offset() const { return 0; }
@@ -51,23 +51,20 @@ public:
 
     // Create FormatConverter instance and initialize it, nullptr will be returned on
     // initialization error.
-    static std::unique_ptr<FormatConverter> Create(VideoPixelFormat outFormat,
+    static std::unique_ptr<FormatConverter> create(VideoPixelFormat outFormat,
                                                    const ui::Size& visibleSize, uint32_t inputCount,
                                                    const ui::Size& codedSize);
 
-    // Convert the input block into the alternative block with required pixel format and return it,
-    // or return the original block if zero-copy is applied.
-    C2ConstGraphicBlock convertBlock(uint64_t frameIndex, const C2ConstGraphicBlock& inputBlock,
-                                     c2_status_t* status /* non-null */);
+    // Convert the |inputBlock| to the configured pixel format and return it as |convertedBlock|.
+    // Returns the original block if no conversion is required.
+    c2_status_t convertBlock(uint64_t frameIndex, const C2ConstGraphicBlock& inputBlock,
+                             C2ConstGraphicBlock* convertedBlock);
     // Return the block ownership when VEA no longer needs it, or erase the zero-copy BlockEntry.
     c2_status_t returnBlock(uint64_t frameIndex);
     // Check if there is available block for conversion.
     bool isReady() const { return !mAvailableQueue.empty(); }
 
 private:
-    // The minimal number requirement of allocated buffers for conversion. This value is the same as
-    // kMinInputBufferArraySize from CCodecBufferChannel.
-    static constexpr uint32_t kMinInputBufferCount = 8;
     // The constant used by BlockEntry to indicate no frame is associated with the BlockEntry.
     static constexpr uint64_t kNoFrameAssociated = ~static_cast<uint64_t>(0);
 
@@ -91,11 +88,14 @@ private:
 
     FormatConverter() = default;
 
-    // Initialize foramt converter. It will pre-allocate a set of graphic blocks as |codedSize| and
-    // |outFormat|. This function should be called prior to other functions.
+    // Initialize format converter. This pre-allocates a set of graphic blocks with |codedSize| and
+    // |outFormat| for format conversion. This function should be called prior to other functions.
     c2_status_t initialize(VideoPixelFormat outFormat, const ui::Size& visibleSize,
                            uint32_t inputCount, const ui::Size& codedSize);
 
+    // Allocate a set of graphic blocks with |mCodedSize| and |mOutFormat| for format conversion.
+    c2_status_t allocateBuffers(uint32_t count);
+
     // The array of block entries.
     std::vector<std::unique_ptr<BlockEntry>> mGraphicBlocks;
     // The queue of recording the raw pointers of available graphic blocks. The consumed block will
@@ -106,8 +106,12 @@ private:
     std::unique_ptr<uint8_t[]> mTempPlaneU;
     std::unique_ptr<uint8_t[]> mTempPlaneV;
 
+    // The output pixel format.
     VideoPixelFormat mOutFormat = VideoPixelFormat::UNKNOWN;
+    // The video frame visible size.
     ui::Size mVisibleSize;
+    // The video frame coded size.
+    ui::Size mCodedSize;
 };
 
 }  // namespace android
diff --git a/common/include/v4l2_codec2/common/H264NalParser.h b/common/include/v4l2_codec2/common/H264NalParser.h
new file mode 100644
index 0000000000000000000000000000000000000000..125efd83268890678c1305b88229a8143ab24f1f
--- /dev/null
+++ b/common/include/v4l2_codec2/common/H264NalParser.h
@@ -0,0 +1,40 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef ANDROID_V4L2_CODEC2_COMMON_H264NALPARSER_H
+#define ANDROID_V4L2_CODEC2_COMMON_H264NALPARSER_H
+
+#include <stdint.h>
+
+#include <v4l2_codec2/common/NalParser.h>
+
+namespace android {
+
+// Helper class to parse H264 NAL units from data.
+class H264NalParser : public NalParser {
+public:
+    // Type of an IDR slice NAL unit.
+    static constexpr uint8_t kIDRType = 5;
+    // Type of a SPS NAL unit.
+    static constexpr uint8_t kSPSType = 7;
+    // Type of a PPS NAL unit.
+    static constexpr uint8_t kPPSType = 8;
+
+    H264NalParser(const uint8_t* data, size_t length);
+    ~H264NalParser() = default;
+
+    // Locate the sequence parameter set (SPS).
+    bool locateSPS() override;
+    bool locateIDR() override;
+
+    // Get the type of the current NAL unit.
+    uint8_t type() const override;
+
+    // Find the H.264 video's color aspects in the current SPS NAL.
+    bool findCodedColorAspects(ColorAspects* colorAspects) override;
+};
+
+}  // namespace android
+
+#endif  // ANDROID_V4L2_CODEC2_COMMON_H264NALPARSER_H
diff --git a/common/include/v4l2_codec2/common/HEVCNalParser.h b/common/include/v4l2_codec2/common/HEVCNalParser.h
new file mode 100644
index 0000000000000000000000000000000000000000..0f4574a2616a80600835b511ffebd5d702bf45ca
--- /dev/null
+++ b/common/include/v4l2_codec2/common/HEVCNalParser.h
@@ -0,0 +1,37 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef ANDROID_V4L2_CODEC2_COMMON_HEVCNALPARSER_H
+#define ANDROID_V4L2_CODEC2_COMMON_HEVCNALPARSER_H
+
+#include <stdint.h>
+
+#include <v4l2_codec2/common/NalParser.h>
+
+namespace android {
+
+// Helper class to parse HEVC NAL units from data.
+class HEVCNalParser : public NalParser {
+public:
+    // Type of an IDR slice NAL unit (IDR_W_RADL).
+    static constexpr uint8_t kIDRType = 19;
+    static constexpr uint8_t kSPSType = 33;  // Type of a SPS NAL unit.
+
+    HEVCNalParser(const uint8_t* data, size_t length);
+    ~HEVCNalParser() = default;
+
+    // Locate the sequence parameter set (SPS); locateIDR locates the next IDR slice.
+    bool locateSPS() override;
+    bool locateIDR() override;
+
+    // Get the type of the current NAL unit.
+    uint8_t type() const override;
+
+    // Find the HEVC video's color aspects in the current SPS NAL.
+    bool findCodedColorAspects(ColorAspects* colorAspects) override;
+};
+
+}  // namespace android
+
+#endif  // ANDROID_V4L2_CODEC2_COMMON_HEVCNALPARSER_H
diff --git a/common/include/v4l2_codec2/common/NalParser.h b/common/include/v4l2_codec2/common/NalParser.h
index ec8a8769fbf47c15823964652c6445dab50319f4..50032d3a8846a8a100dc1d67ea510926df84a04b 100644
--- a/common/include/v4l2_codec2/common/NalParser.h
+++ b/common/include/v4l2_codec2/common/NalParser.h
@@ -7,18 +7,13 @@
 
 #include <stdint.h>
 
+#include <media/stagefright/foundation/ABitReader.h>
+
 namespace android {
 
-// Helper class to parse H264 NAL units from data.
+// Helper class to parse NAL units from data.
 class NalParser {
 public:
-    // Type of a IDR Slice NAL unit.
-    static constexpr uint8_t kIDRType = 5;
-    // Type of a SPS NAL unit.
-    static constexpr uint8_t kSPSType = 7;
-    // Type of a PPS NAL unit.
-    static constexpr uint8_t kPPSType = 8;
-
     // Parameters related to a video's color aspects.
     struct ColorAspects {
         uint32_t primaries;
@@ -28,6 +23,7 @@ public:
     };
 
     NalParser(const uint8_t* data, size_t length);
+    virtual ~NalParser() = default;
 
     // Locates the next NAL after |mNextNalStartCodePos|. If there is one, updates |mCurrNalDataPos|
     // to the first byte of the NAL data (start code is not included), and |mNextNalStartCodePos| to
@@ -38,7 +34,8 @@ public:
     bool locateNextNal();
 
     // Locate the sequence parameter set (SPS).
-    bool locateSPS();
+    virtual bool locateSPS() = 0;
+    virtual bool locateIDR() = 0;
 
     // Gets current NAL data (start code is not included).
     const uint8_t* data() const;
@@ -47,15 +44,21 @@ public:
     size_t length() const;
 
     // Get the type of the current NAL unit.
-    uint8_t type() const;
+    virtual uint8_t type() const = 0;
+
+    // Find the video's color aspects in the current SPS NAL.
+    virtual bool findCodedColorAspects(ColorAspects* colorAspects) = 0;
+
+    // Read unsigned int encoded with exponential-golomb.
+    static bool parseUE(ABitReader* br, uint32_t* val);
 
-    // Find the H.264 video's color aspects in the current SPS NAL.
-    bool findCodedColorAspects(ColorAspects* colorAspects);
+    // Read signed int encoded with exponential-golomb.
+    static bool parseSE(ABitReader* br, int32_t* val);
 
-private:
+protected:
     const uint8_t* findNextStartCodePos() const;
 
-    // The byte pattern for the start of a H264 NAL unit.
+    // The byte pattern for the start of a NAL unit.
     const uint8_t kNalStartCode[3] = {0x00, 0x00, 0x01};
     // The length in bytes of the NAL-unit start pattern.
     const size_t kNalStartCodeLength = 3;
diff --git a/components/Android.bp b/components/Android.bp
index 5bee73b2d83f724231a39f483753667408c0fe93..73efcf20cb114e2c3add2a456f952a7f9ba78344 100644
--- a/components/Android.bp
+++ b/components/Android.bp
@@ -18,14 +18,11 @@ cc_library {
     srcs: [
         "VideoFrame.cpp",
         "VideoFramePool.cpp",
-        "V4L2ComponentFactory.cpp",
-        "V4L2ComponentStore.cpp",
-        "V4L2Decoder.cpp",
-        "V4L2DecodeComponent.cpp",
-        "V4L2DecodeInterface.cpp",
-        "V4L2Encoder.cpp",
-        "V4L2EncodeComponent.cpp",
-        "V4L2EncodeInterface.cpp",
+        "ComponentStore.cpp",
+        "DecodeComponent.cpp",
+        "DecodeInterface.cpp",
+        "EncodeComponent.cpp",
+        "EncodeInterface.cpp",
         "VideoDecoder.cpp",
         "VideoEncoder.cpp",
     ],
diff --git a/components/ComponentStore.cpp b/components/ComponentStore.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..aa4eba8c8bee68137c1f74c65d2695af331c2adf
--- /dev/null
+++ b/components/ComponentStore.cpp
@@ -0,0 +1,207 @@
+// Copyright 2023 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ComponentStore"
+
+#include <v4l2_codec2/components/ComponentStore.h>
+
+#include <stdint.h>
+
+#include <memory>
+#include <mutex>
+
+#include <C2.h>
+#include <C2Config.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <v4l2_codec2/common/VideoTypes.h>
+
+namespace android {
+namespace {
+const uint32_t kComponentRank = 0x80;
+
+}  // namespace
+
+ComponentStore::ComponentStore(C2String storeName)
+      : mStoreName(std::move(storeName)), mReflector(std::make_shared<C2ReflectorHelper>()) {
+    ALOGV("%s()", __func__);
+}
+
+ComponentStore::~ComponentStore() {
+    ALOGV("%s()", __func__);
+
+    std::lock_guard<std::mutex> lock(mCachedFactoriesLock);
+    mCachedFactories.clear();
+}
+
+C2String ComponentStore::getName() const {
+    return mStoreName;
+}
+
+c2_status_t ComponentStore::createComponent(C2String name,
+                                            std::shared_ptr<C2Component>* const component) {
+    ALOGV("%s(%s)", __func__, name.c_str());
+
+    const auto& decl = mDeclarations.find(name);
+    if (decl == mDeclarations.end()) {
+        ALOGI("%s(): Invalid component name: %s", __func__, name.c_str());
+        return C2_NOT_FOUND;
+    }
+
+    auto factory = getFactory(name);
+    if (factory == nullptr) return C2_CORRUPTED;
+
+    component->reset();
+    return factory->createComponent(0, component);
+}
+
+c2_status_t ComponentStore::createInterface(
+        C2String name, std::shared_ptr<C2ComponentInterface>* const interface) {
+    ALOGV("%s(%s)", __func__, name.c_str());
+
+    const auto& decl = mDeclarations.find(name);
+    if (decl == mDeclarations.end()) {
+        ALOGI("%s(): Invalid component name: %s", __func__, name.c_str());
+        return C2_NOT_FOUND;
+    }
+
+    auto factory = getFactory(name);
+    if (factory == nullptr) return C2_CORRUPTED;
+
+    interface->reset();
+    return factory->createInterface(0, interface);
+}
+
+std::vector<std::shared_ptr<const C2Component::Traits>> ComponentStore::listComponents() {
+    ALOGV("%s()", __func__);
+
+    std::vector<std::shared_ptr<const C2Component::Traits>> ret;
+    for (const auto& decl : mDeclarations) {
+        ret.push_back(getTraits(decl.first));
+    }
+
+    return ret;
+}
+
+std::shared_ptr<C2ParamReflector> ComponentStore::getParamReflector() const {
+    return mReflector;
+}
+
+c2_status_t ComponentStore::copyBuffer(std::shared_ptr<C2GraphicBuffer> /* src */,
+                                       std::shared_ptr<C2GraphicBuffer> /* dst */) {
+    return C2_OMITTED;
+}
+
+c2_status_t ComponentStore::querySupportedParams_nb(
+        std::vector<std::shared_ptr<C2ParamDescriptor>>* const /* params */) const {
+    return C2_OK;
+}
+
+c2_status_t ComponentStore::query_sm(
+        const std::vector<C2Param*>& stackParams,
+        const std::vector<C2Param::Index>& heapParamIndices,
+        std::vector<std::unique_ptr<C2Param>>* const /* heapParams */) const {
+    // There are no supported config params.
+    return stackParams.empty() && heapParamIndices.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+c2_status_t ComponentStore::config_sm(
+        const std::vector<C2Param*>& params,
+        std::vector<std::unique_ptr<C2SettingResult>>* const /* failures */) {
+    // There are no supported config params.
+    return params.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+c2_status_t ComponentStore::querySupportedValues_sm(
+        std::vector<C2FieldSupportedValuesQuery>& fields) const {
+    // There are no supported config params.
+    return fields.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+::C2ComponentFactory* ComponentStore::getFactory(const C2String& name) {
+    ALOGV("%s(%s)", __func__, name.c_str());
+    // Valid names are whatever was registered via the Builder; checked against mDeclarations below.
+
+    std::lock_guard<std::mutex> lock(mCachedFactoriesLock);
+    const auto it = mCachedFactories.find(name);
+    if (it != mCachedFactories.end()) return it->second.get();
+
+    const auto& decl = mDeclarations.find(name);
+    if (decl == mDeclarations.end()) {
+        ALOGI("%s(): Invalid component name: %s", __func__, name.c_str());
+        return nullptr;
+    }
+
+    std::unique_ptr<::C2ComponentFactory> factory = decl->second.factory(name, mReflector);
+    if (factory == nullptr) {
+        ALOGE("Failed to create factory for %s", name.c_str());
+        return nullptr;
+    }
+
+    auto ret = factory.get();
+    mCachedFactories.emplace(name, std::move(factory));
+    return ret;
+}
+
+std::shared_ptr<const C2Component::Traits> ComponentStore::getTraits(const C2String& name) {
+    ALOGV("%s(%s)", __func__, name.c_str());
+
+    const auto& iter = mDeclarations.find(name);
+    if (iter == mDeclarations.end()) {
+        ALOGE("Invalid component name: %s", name.c_str());
+        return nullptr;
+    }
+
+    const Declaration& decl = iter->second;
+
+    std::lock_guard<std::mutex> lock(mCachedTraitsLock);
+    auto it = mCachedTraits.find(name);
+    if (it != mCachedTraits.end()) return it->second;
+
+    auto traits = std::make_shared<C2Component::Traits>();
+    traits->name = name;
+    traits->domain = C2Component::DOMAIN_VIDEO;
+    traits->rank = kComponentRank;
+    traits->kind = decl.kind;
+
+    switch (decl.codec) {
+    case VideoCodec::H264:
+        traits->mediaType = MEDIA_MIMETYPE_VIDEO_AVC;
+        break;
+    case VideoCodec::VP8:
+        traits->mediaType = MEDIA_MIMETYPE_VIDEO_VP8;
+        break;
+    case VideoCodec::VP9:
+        traits->mediaType = MEDIA_MIMETYPE_VIDEO_VP9;
+        break;
+    case VideoCodec::HEVC:
+        traits->mediaType = MEDIA_MIMETYPE_VIDEO_HEVC;
+        break;
+    }
+
+    mCachedTraits.emplace(name, traits);
+    return traits;
+}
+
+ComponentStore::Builder::Builder(C2String storeName)
+      : mStore(new ComponentStore(std::move(storeName))) {}
+
+ComponentStore::Builder& ComponentStore::Builder::decoder(std::string name, VideoCodec codec,
+                                                          GetFactory factory) {
+    mStore->mDeclarations[name] = Declaration{codec, C2Component::KIND_DECODER, std::move(factory)};
+    return *this;
+}
+
+ComponentStore::Builder& ComponentStore::Builder::encoder(std::string name, VideoCodec codec,
+                                                          GetFactory factory) {
+    mStore->mDeclarations[name] = Declaration{codec, C2Component::KIND_ENCODER, std::move(factory)};
+    return *this;
+}
+
+std::shared_ptr<ComponentStore> ComponentStore::Builder::build() && {
+    return std::shared_ptr<ComponentStore>(std::move(mStore));
+}
+}  // namespace android
diff --git a/components/V4L2DecodeComponent.cpp b/components/DecodeComponent.cpp
similarity index 69%
rename from components/V4L2DecodeComponent.cpp
rename to components/DecodeComponent.cpp
index 2770b1ecd4b7c5303e87dc61f8eae7eac160f457..c88fa039139e89336910960879a68c20412a9d76 100644
--- a/components/V4L2DecodeComponent.cpp
+++ b/components/DecodeComponent.cpp
@@ -1,11 +1,12 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "V4L2DecodeComponent"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#define LOG_TAG "DecodeComponent"
 
-#include <v4l2_codec2/components/V4L2DecodeComponent.h>
+#include <v4l2_codec2/components/DecodeComponent.h>
 
 #include <inttypes.h>
 #include <linux/videodev2.h>
@@ -19,60 +20,48 @@
 #include <SimpleC2Interface.h>
 #include <base/bind.h>
 #include <base/callback_helpers.h>
+#include <base/strings/stringprintf.h>
 #include <base/time/time.h>
 #include <cutils/properties.h>
 #include <log/log.h>
 #include <media/stagefright/foundation/ColorUtils.h>
+#include <utils/Trace.h>
 
 #include <v4l2_codec2/common/Common.h>
-#include <v4l2_codec2/common/NalParser.h>
+#include <v4l2_codec2/common/H264NalParser.h>
+#include <v4l2_codec2/common/HEVCNalParser.h>
 #include <v4l2_codec2/common/VideoTypes.h>
 #include <v4l2_codec2/components/BitstreamBuffer.h>
-#include <v4l2_codec2/components/V4L2Decoder.h>
 #include <v4l2_codec2/components/VideoFramePool.h>
 
 namespace android {
 namespace {
 
-// CCBC pauses sending input buffers to the component when all the output slots are filled by
-// pending decoded buffers. If the available output buffers are exhausted before CCBC pauses sending
-// input buffers, CCodec may timeout due to waiting for a available output buffer.
-// This function returns the minimum number of output buffers to prevent the buffers from being
-// exhausted before CCBC pauses sending input buffers.
-size_t getMinNumOutputBuffers(VideoCodec codec) {
-    // The constant values copied from CCodecBufferChannel.cpp.
-    // (b/184020290): Check the value still sync when seeing error message from CCodec:
-    // "previous call to queue exceeded timeout".
-    constexpr size_t kSmoothnessFactor = 4;
-    constexpr size_t kRenderingDepth = 3;
-    // Extra number of needed output buffers for V4L2Decoder.
-    constexpr size_t kExtraNumOutputBuffersForDecoder = 2;
-
-    // The total needed number of output buffers at pipeline are:
-    // - MediaCodec output slots: output delay + kSmoothnessFactor
-    // - Surface: kRenderingDepth
-    // - Component: kExtraNumOutputBuffersForDecoder
-    return V4L2DecodeInterface::getOutputDelay(codec) + kSmoothnessFactor + kRenderingDepth +
-           kExtraNumOutputBuffersForDecoder;
-}
-
 // Mask against 30 bits to avoid (undefined) wraparound on signed integer.
 int32_t frameIndexToBitstreamId(c2_cntr64_t frameIndex) {
     return static_cast<int32_t>(frameIndex.peeku() & 0x3FFFFFFF);
 }
 
-bool parseCodedColorAspects(const C2ConstLinearBlock& input,
+bool parseCodedColorAspects(const C2ConstLinearBlock& input, std::optional<VideoCodec> codec,
                             C2StreamColorAspectsInfo::input* codedAspects) {
     C2ReadView view = input.map().get();
-    NalParser parser(view.data(), view.capacity());
+    NalParser::ColorAspects aspects;
+    std::unique_ptr<NalParser> parser;
+    if (codec == VideoCodec::H264) {
+        parser = std::make_unique<H264NalParser>(view.data(), view.capacity());
+    } else if (codec == VideoCodec::HEVC) {
+        parser = std::make_unique<HEVCNalParser>(view.data(), view.capacity());
+    } else {
+        ALOGV("Unsupported codec for finding color aspects");
+        return false;
+    }
 
-    if (!parser.locateSPS()) {
+    if (!parser->locateSPS()) {
         ALOGV("Couldn't find SPS");
         return false;
     }
 
-    NalParser::ColorAspects aspects;
-    if (!parser.findCodedColorAspects(&aspects)) {
+    if (!parser->findCodedColorAspects(&aspects)) {
         ALOGV("Couldn't find color description in SPS");
         return false;
     }
@@ -137,55 +126,26 @@ bool isNoShowFrameWork(const C2Work& work, const C2WorkOrdinalStruct& currOrdina
 
 }  // namespace
 
-// static
-std::atomic<int32_t> V4L2DecodeComponent::sConcurrentInstances = 0;
-
-// static
-std::shared_ptr<C2Component> V4L2DecodeComponent::create(
-        const std::string& name, c2_node_id_t id, const std::shared_ptr<C2ReflectorHelper>& helper,
-        C2ComponentFactory::ComponentDeleter deleter) {
-    static const int32_t kMaxConcurrentInstances =
-            property_get_int32("ro.vendor.v4l2_codec2.decode_concurrent_instances", -1);
-    static std::mutex mutex;
-
-    std::lock_guard<std::mutex> lock(mutex);
-
-    if (kMaxConcurrentInstances >= 0 && sConcurrentInstances.load() >= kMaxConcurrentInstances) {
-        ALOGW("Reject to Initialize() due to too many instances: %d", sConcurrentInstances.load());
-        return nullptr;
-    }
-
-    auto intfImpl = std::make_shared<V4L2DecodeInterface>(name, helper);
-    if (intfImpl->status() != C2_OK) {
-        ALOGE("Failed to initialize V4L2DecodeInterface.");
-        return nullptr;
-    }
-
-    return std::shared_ptr<C2Component>(new V4L2DecodeComponent(name, id, helper, intfImpl),
-                                        deleter);
-}
-
-V4L2DecodeComponent::V4L2DecodeComponent(const std::string& name, c2_node_id_t id,
-                                         const std::shared_ptr<C2ReflectorHelper>& helper,
-                                         const std::shared_ptr<V4L2DecodeInterface>& intfImpl)
-      : mIntfImpl(intfImpl),
-        mIntf(std::make_shared<SimpleInterface<V4L2DecodeInterface>>(name.c_str(), id, mIntfImpl)) {
+DecodeComponent::DecodeComponent(uint32_t debugStreamId, const std::string& name, c2_node_id_t id,
+                                 const std::shared_ptr<DecodeInterface>& intfImpl)
+      : mDebugStreamId(debugStreamId),
+        mIntfImpl(intfImpl),
+        mIntf(std::make_shared<SimpleInterface<DecodeInterface>>(name.c_str(), id, mIntfImpl)) {
     ALOGV("%s(%s)", __func__, name.c_str());
-
-    sConcurrentInstances.fetch_add(1, std::memory_order_relaxed);
     mIsSecure = name.find(".secure") != std::string::npos;
 }
 
-V4L2DecodeComponent::~V4L2DecodeComponent() {
+DecodeComponent::~DecodeComponent() {
     ALOGV("%s()", __func__);
-
-    release();
-
-    sConcurrentInstances.fetch_sub(1, std::memory_order_relaxed);
+    if (mDecoderThread.IsRunning() && !mDecoderTaskRunner->RunsTasksInCurrentSequence()) {
+        mDecoderTaskRunner->PostTask(FROM_HERE,
+                                     ::base::BindOnce(&DecodeComponent::releaseTask, mWeakThis));
+        mDecoderThread.Stop();
+    }
     ALOGV("%s() done", __func__);
 }
 
-c2_status_t V4L2DecodeComponent::start() {
+c2_status_t DecodeComponent::start() {
     ALOGV("%s()", __func__);
     std::lock_guard<std::mutex> lock(mStartStopLock);
 
@@ -205,7 +165,7 @@ c2_status_t V4L2DecodeComponent::start() {
     c2_status_t status = C2_CORRUPTED;
     ::base::WaitableEvent done;
     mDecoderTaskRunner->PostTask(
-            FROM_HERE, ::base::BindOnce(&V4L2DecodeComponent::startTask, mWeakThis,
+            FROM_HERE, ::base::BindOnce(&DecodeComponent::startTask, mWeakThis,
                                         ::base::Unretained(&status), ::base::Unretained(&done)));
     done.Wait();
 
@@ -213,55 +173,15 @@ c2_status_t V4L2DecodeComponent::start() {
     return status;
 }
 
-void V4L2DecodeComponent::startTask(c2_status_t* status, ::base::WaitableEvent* done) {
-    ALOGV("%s()", __func__);
-    ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
-
-    ::base::ScopedClosureRunner done_caller(
-            ::base::BindOnce(&::base::WaitableEvent::Signal, ::base::Unretained(done)));
-    *status = C2_CORRUPTED;
-
-    const auto codec = mIntfImpl->getVideoCodec();
-    if (!codec) {
-        ALOGE("Failed to get video codec.");
-        return;
-    }
-    const size_t inputBufferSize = mIntfImpl->getInputBufferSize();
-    const size_t minNumOutputBuffers = getMinNumOutputBuffers(*codec);
-
-    // ::base::Unretained(this) is safe here because |mDecoder| is always destroyed before
-    // |mDecoderThread| is stopped, so |*this| is always valid during |mDecoder|'s lifetime.
-    mDecoder = V4L2Decoder::Create(*codec, inputBufferSize, minNumOutputBuffers,
-                                   ::base::BindRepeating(&V4L2DecodeComponent::getVideoFramePool,
-                                                         ::base::Unretained(this)),
-                                   ::base::BindRepeating(&V4L2DecodeComponent::onOutputFrameReady,
-                                                         ::base::Unretained(this)),
-                                   ::base::BindRepeating(&V4L2DecodeComponent::reportError,
-                                                         ::base::Unretained(this), C2_CORRUPTED),
-                                   mDecoderTaskRunner);
-    if (!mDecoder) {
-        ALOGE("Failed to create V4L2Decoder for %s", VideoCodecToString(*codec));
-        return;
-    }
-
-    // Get default color aspects on start.
-    if (!mIsSecure && *codec == VideoCodec::H264) {
-        if (mIntfImpl->queryColorAspects(&mCurrentColorAspects) != C2_OK) return;
-        mPendingColorAspectsChange = false;
-    }
-
-    *status = C2_OK;
-}
-
-std::unique_ptr<VideoFramePool> V4L2DecodeComponent::getVideoFramePool(const ui::Size& size,
-                                                                       HalPixelFormat pixelFormat,
-                                                                       size_t numBuffers) {
+std::unique_ptr<VideoFramePool> DecodeComponent::getVideoFramePool(const ui::Size& size,
+                                                                   HalPixelFormat pixelFormat,
+                                                                   size_t numBuffers) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
     auto sharedThis = weak_from_this().lock();
     if (sharedThis == nullptr) {
-        ALOGE("%s(): V4L2DecodeComponent instance is destroyed.", __func__);
+        ALOGE("%s(): DecodeComponent instance is destroyed.", __func__);
         return nullptr;
     }
 
@@ -289,7 +209,7 @@ std::unique_ptr<VideoFramePool> V4L2DecodeComponent::getVideoFramePool(const ui:
                                   mDecoderTaskRunner);
 }
 
-c2_status_t V4L2DecodeComponent::stop() {
+c2_status_t DecodeComponent::stop() {
     ALOGV("%s()", __func__);
     std::lock_guard<std::mutex> lock(mStartStopLock);
 
@@ -301,7 +221,7 @@ c2_status_t V4L2DecodeComponent::stop() {
 
     if (mDecoderThread.IsRunning()) {
         mDecoderTaskRunner->PostTask(FROM_HERE,
-                                     ::base::BindOnce(&V4L2DecodeComponent::stopTask, mWeakThis));
+                                     ::base::BindOnce(&DecodeComponent::stopTask, mWeakThis));
         mDecoderThread.Stop();
         mDecoderTaskRunner = nullptr;
     }
@@ -310,7 +230,8 @@ c2_status_t V4L2DecodeComponent::stop() {
     return C2_OK;
 }
 
-void V4L2DecodeComponent::stopTask() {
+void DecodeComponent::stopTask() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -320,19 +241,19 @@ void V4L2DecodeComponent::stopTask() {
     releaseTask();
 }
 
-c2_status_t V4L2DecodeComponent::reset() {
+c2_status_t DecodeComponent::reset() {
     ALOGV("%s()", __func__);
 
     return stop();
 }
 
-c2_status_t V4L2DecodeComponent::release() {
+c2_status_t DecodeComponent::release() {
     ALOGV("%s()", __func__);
     std::lock_guard<std::mutex> lock(mStartStopLock);
 
     if (mDecoderThread.IsRunning()) {
-        mDecoderTaskRunner->PostTask(
-                FROM_HERE, ::base::BindOnce(&V4L2DecodeComponent::releaseTask, mWeakThis));
+        mDecoderTaskRunner->PostTask(FROM_HERE,
+                                     ::base::BindOnce(&DecodeComponent::releaseTask, mWeakThis));
         mDecoderThread.Stop();
         mDecoderTaskRunner = nullptr;
     }
@@ -341,7 +262,8 @@ c2_status_t V4L2DecodeComponent::release() {
     return C2_OK;
 }
 
-void V4L2DecodeComponent::releaseTask() {
+void DecodeComponent::releaseTask() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -349,8 +271,8 @@ void V4L2DecodeComponent::releaseTask() {
     mDecoder = nullptr;
 }
 
-c2_status_t V4L2DecodeComponent::setListener_vb(
-        const std::shared_ptr<C2Component::Listener>& listener, c2_blocking_t mayBlock) {
+c2_status_t DecodeComponent::setListener_vb(const std::shared_ptr<C2Component::Listener>& listener,
+                                            c2_blocking_t mayBlock) {
     ALOGV("%s()", __func__);
 
     auto currentState = mComponentState.load();
@@ -372,14 +294,14 @@ c2_status_t V4L2DecodeComponent::setListener_vb(
     }
 
     ::base::WaitableEvent done;
-    mDecoderTaskRunner->PostTask(FROM_HERE, ::base::Bind(&V4L2DecodeComponent::setListenerTask,
-                                                         mWeakThis, listener, &done));
+    mDecoderTaskRunner->PostTask(
+            FROM_HERE, ::base::Bind(&DecodeComponent::setListenerTask, mWeakThis, listener, &done));
     done.Wait();
     return C2_OK;
 }
 
-void V4L2DecodeComponent::setListenerTask(const std::shared_ptr<Listener>& listener,
-                                          ::base::WaitableEvent* done) {
+void DecodeComponent::setListenerTask(const std::shared_ptr<Listener>& listener,
+                                      ::base::WaitableEvent* done) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -387,7 +309,7 @@ void V4L2DecodeComponent::setListenerTask(const std::shared_ptr<Listener>& liste
     done->Signal();
 }
 
-c2_status_t V4L2DecodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* const items) {
+c2_status_t DecodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* const items) {
     ALOGV("%s()", __func__);
 
     auto currentState = mComponentState.load();
@@ -397,15 +319,22 @@ c2_status_t V4L2DecodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* co
     }
 
     while (!items->empty()) {
+        if (ATRACE_ENABLED()) {
+            const std::string atraceLabel = ::base::StringPrintf("#%u C2Work", mDebugStreamId);
+            ATRACE_ASYNC_BEGIN(atraceLabel.c_str(),
+                               items->front()->input.ordinal.frameIndex.peekull());
+        }
+
         mDecoderTaskRunner->PostTask(FROM_HERE,
-                                     ::base::BindOnce(&V4L2DecodeComponent::queueTask, mWeakThis,
+                                     ::base::BindOnce(&DecodeComponent::queueTask, mWeakThis,
                                                       std::move(items->front())));
         items->pop_front();
     }
     return C2_OK;
 }
 
-void V4L2DecodeComponent::queueTask(std::unique_ptr<C2Work> work) {
+void DecodeComponent::queueTask(std::unique_ptr<C2Work> work) {
+    ATRACE_CALL();
     ALOGV("%s(): flags=0x%x, index=%llu, timestamp=%llu", __func__, work->input.flags,
           work->input.ordinal.frameIndex.peekull(), work->input.ordinal.timestamp.peekull());
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
@@ -440,7 +369,8 @@ void V4L2DecodeComponent::queueTask(std::unique_ptr<C2Work> work) {
     pumpPendingWorks();
 }
 
-void V4L2DecodeComponent::pumpPendingWorks() {
+void DecodeComponent::pumpPendingWorks() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -466,43 +396,15 @@ void V4L2DecodeComponent::pumpPendingWorks() {
         ALOGW_IF(!res.second, "We already inserted bitstreamId %d to decoder?", bitstreamId);
 
         if (!isEmptyWork) {
-            // If input.buffers is not empty, the buffer should have meaningful content inside.
-            C2ConstLinearBlock linearBlock =
-                    work->input.buffers.front()->data().linearBlocks().front();
-            ALOG_ASSERT(linearBlock.size() > 0u, "Input buffer of work(%d) is empty.", bitstreamId);
-
-            // Try to parse color aspects from bitstream for CSD work of non-secure H264 codec.
-            if (isCSDWork && !mIsSecure && (mIntfImpl->getVideoCodec() == VideoCodec::H264)) {
-                C2StreamColorAspectsInfo::input codedAspects = {0u};
-                if (parseCodedColorAspects(linearBlock, &codedAspects)) {
-                    std::vector<std::unique_ptr<C2SettingResult>> failures;
-                    c2_status_t status =
-                            mIntfImpl->config({&codedAspects}, C2_MAY_BLOCK, &failures);
-                    if (status != C2_OK) {
-                        ALOGE("Failed to config color aspects to interface: %d", status);
-                        reportError(status);
-                        return;
-                    }
-
-                    // Record current frame index, color aspects should be updated only for output
-                    // buffers whose frame indices are not less than this one.
-                    mPendingColorAspectsChange = true;
-                    mPendingColorAspectsChangeFrameIndex = work->input.ordinal.frameIndex.peeku();
-                }
-            }
-
-            std::unique_ptr<ConstBitstreamBuffer> buffer = std::make_unique<ConstBitstreamBuffer>(
-                    bitstreamId, linearBlock, linearBlock.offset(), linearBlock.size());
-            if (!buffer) {
-                reportError(C2_CORRUPTED);
-                return;
+            if (isCSDWork) {
+                processCSDWork(bitstreamId, work);
+            } else {
+                processWork(bitstreamId, work);
             }
-            mDecoder->decode(std::move(buffer), ::base::BindOnce(&V4L2DecodeComponent::onDecodeDone,
-                                                                 mWeakThis, bitstreamId));
         }
 
         if (isEOSWork) {
-            mDecoder->drain(::base::BindOnce(&V4L2DecodeComponent::onDrainDone, mWeakThis));
+            mDecoder->drain(::base::BindOnce(&DecodeComponent::onDrainDone, mWeakThis));
             mIsDraining = true;
         }
 
@@ -511,7 +413,63 @@ void V4L2DecodeComponent::pumpPendingWorks() {
     }
 }
 
-void V4L2DecodeComponent::onDecodeDone(int32_t bitstreamId, VideoDecoder::DecodeStatus status) {
+void DecodeComponent::processCSDWork(const int32_t bitstreamId, const C2Work* work) {
+    // If input.buffers is not empty, the buffer should have meaningful content inside.
+    C2ConstLinearBlock linearBlock = work->input.buffers.front()->data().linearBlocks().front();
+    ALOG_ASSERT(linearBlock.size() > 0u, "Input buffer of work(%d) is empty.", bitstreamId);
+
+    if (mIntfImpl->getVideoCodec() == VideoCodec::VP9) {
+        // The VP9 decoder does not support and does not need the Codec Specific Data (CSD):
+        // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate.
+        // Most of its content (profile, level, bit depth and chroma subsampling)
+        // can be extracted directly from VP9 bitstream. Ignore CSD if it was passed.
+        reportWorkIfFinished(bitstreamId);
+        return;
+    } else if ((!mIsSecure && mIntfImpl->getVideoCodec() == VideoCodec::H264) ||
+               mIntfImpl->getVideoCodec() == VideoCodec::HEVC) {
+        // Try to parse color aspects from bitstream for CSD work of non-secure H264 codec or HEVC
+        // codec (HEVC will only be CENCv3 which is parseable for secure).
+        C2StreamColorAspectsInfo::input codedAspects = {0u};
+        if (parseCodedColorAspects(linearBlock, mIntfImpl->getVideoCodec(), &codedAspects)) {
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t status = mIntfImpl->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+            if (status != C2_OK) {
+                ALOGE("Failed to config color aspects to interface: %d", status);
+                reportError(status);
+                return;
+            }
+            // Record current frame index, color aspects should be updated only for output
+            // buffers whose frame indices are not less than this one.
+            mPendingColorAspectsChange = true;
+            mPendingColorAspectsChangeFrameIndex = work->input.ordinal.frameIndex.peeku();
+        }
+    }
+
+    processWorkBuffer(bitstreamId, linearBlock);
+}
+
+void DecodeComponent::processWork(const int32_t bitstreamId, const C2Work* work) {
+    // If input.buffers is not empty, the buffer should have meaningful content inside.
+    C2ConstLinearBlock linearBlock = work->input.buffers.front()->data().linearBlocks().front();
+    ALOG_ASSERT(linearBlock.size() > 0u, "Input buffer of work(%d) is empty.", bitstreamId);
+
+    processWorkBuffer(bitstreamId, linearBlock);
+}
+
+void DecodeComponent::processWorkBuffer(const int32_t bitstreamId,
+                                        const C2ConstLinearBlock& linearBlock) {
+    std::unique_ptr<ConstBitstreamBuffer> buffer = std::make_unique<ConstBitstreamBuffer>(
+            bitstreamId, linearBlock, linearBlock.offset(), linearBlock.size());
+    if (!buffer) {
+        reportError(C2_CORRUPTED);
+        return;
+    }
+    mDecoder->decode(std::move(buffer),
+                     ::base::BindOnce(&DecodeComponent::onDecodeDone, mWeakThis, bitstreamId));
+}
+
+void DecodeComponent::onDecodeDone(int32_t bitstreamId, VideoDecoder::DecodeStatus status) {
+    ATRACE_CALL();
     ALOGV("%s(bitstreamId=%d, status=%s)", __func__, bitstreamId,
           VideoDecoder::DecodeStatusToString(status));
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
@@ -548,7 +506,7 @@ void V4L2DecodeComponent::onDecodeDone(int32_t bitstreamId, VideoDecoder::Decode
     }
 }
 
-void V4L2DecodeComponent::onOutputFrameReady(std::unique_ptr<VideoFrame> frame) {
+void DecodeComponent::onOutputFrameReady(std::unique_ptr<VideoFrame> frame) {
     ALOGV("%s(bitstreamId=%d)", __func__, frame->getBitstreamId());
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -583,8 +541,9 @@ void V4L2DecodeComponent::onOutputFrameReady(std::unique_ptr<VideoFrame> frame)
     pumpReportWork();
 }
 
-void V4L2DecodeComponent::detectNoShowFrameWorksAndReportIfFinished(
+void DecodeComponent::detectNoShowFrameWorksAndReportIfFinished(
         const C2WorkOrdinalStruct& currOrdinal) {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -613,7 +572,8 @@ void V4L2DecodeComponent::detectNoShowFrameWorksAndReportIfFinished(
     for (const int32_t bitstreamId : noShowFrameBitstreamIds) reportWorkIfFinished(bitstreamId);
 }
 
-void V4L2DecodeComponent::pumpReportWork() {
+void DecodeComponent::pumpReportWork() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -623,7 +583,8 @@ void V4L2DecodeComponent::pumpReportWork() {
     }
 }
 
-bool V4L2DecodeComponent::reportWorkIfFinished(int32_t bitstreamId) {
+bool DecodeComponent::reportWorkIfFinished(int32_t bitstreamId) {
+    ATRACE_CALL();
     ALOGV("%s(bitstreamId = %d)", __func__, bitstreamId);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -657,7 +618,8 @@ bool V4L2DecodeComponent::reportWorkIfFinished(int32_t bitstreamId) {
     return reportWork(std::move(work));
 }
 
-bool V4L2DecodeComponent::reportEOSWork() {
+bool DecodeComponent::reportEOSWork() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -691,7 +653,8 @@ bool V4L2DecodeComponent::reportEOSWork() {
     return reportWork(std::move(eosWork));
 }
 
-bool V4L2DecodeComponent::reportWork(std::unique_ptr<C2Work> work) {
+bool DecodeComponent::reportWork(std::unique_ptr<C2Work> work) {
+    ATRACE_CALL();
     ALOGV("%s(work=%llu)", __func__, work->input.ordinal.frameIndex.peekull());
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -700,14 +663,19 @@ bool V4L2DecodeComponent::reportWork(std::unique_ptr<C2Work> work) {
         return false;
     }
 
+    if (ATRACE_ENABLED()) {
+        const std::string atraceLabel = ::base::StringPrintf("#%u C2Work", mDebugStreamId);
+        ATRACE_ASYNC_END(atraceLabel.c_str(), work->input.ordinal.frameIndex.peekull());
+    }
     std::list<std::unique_ptr<C2Work>> finishedWorks;
     finishedWorks.emplace_back(std::move(work));
     mListener->onWorkDone_nb(weak_from_this(), std::move(finishedWorks));
     return true;
 }
 
-c2_status_t V4L2DecodeComponent::flush_sm(
-        flush_mode_t mode, std::list<std::unique_ptr<C2Work>>* const /* flushedWork */) {
+c2_status_t DecodeComponent::flush_sm(flush_mode_t mode,
+                                      std::list<std::unique_ptr<C2Work>>* const /* flushedWork */) {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
 
     auto currentState = mComponentState.load();
@@ -720,11 +688,12 @@ c2_status_t V4L2DecodeComponent::flush_sm(
     }
 
     mDecoderTaskRunner->PostTask(FROM_HERE,
-                                 ::base::BindOnce(&V4L2DecodeComponent::flushTask, mWeakThis));
+                                 ::base::BindOnce(&DecodeComponent::flushTask, mWeakThis));
     return C2_OK;
 }
 
-void V4L2DecodeComponent::flushTask() {
+void DecodeComponent::flushTask() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -735,7 +704,7 @@ void V4L2DecodeComponent::flushTask() {
     mIsDraining = false;
 }
 
-void V4L2DecodeComponent::reportAbandonedWorks() {
+void DecodeComponent::reportAbandonedWorks() {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -756,6 +725,11 @@ void V4L2DecodeComponent::reportAbandonedWorks() {
         if (!work->input.buffers.empty()) {
             work->input.buffers.front().reset();
         }
+
+        if (ATRACE_ENABLED()) {
+            const std::string atraceLabel = ::base::StringPrintf("#%u C2Work", mDebugStreamId);
+            ATRACE_ASYNC_END(atraceLabel.c_str(), work->input.ordinal.frameIndex.peekull());
+        }
     }
     if (!abandonedWorks.empty()) {
         if (!mListener) {
@@ -766,7 +740,7 @@ void V4L2DecodeComponent::reportAbandonedWorks() {
     }
 }
 
-c2_status_t V4L2DecodeComponent::drain_nb(drain_mode_t mode) {
+c2_status_t DecodeComponent::drain_nb(drain_mode_t mode) {
     ALOGV("%s(mode=%u)", __func__, mode);
 
     auto currentState = mComponentState.load();
@@ -784,12 +758,13 @@ c2_status_t V4L2DecodeComponent::drain_nb(drain_mode_t mode) {
 
     case DRAIN_COMPONENT_WITH_EOS:
         mDecoderTaskRunner->PostTask(FROM_HERE,
-                                     ::base::BindOnce(&V4L2DecodeComponent::drainTask, mWeakThis));
+                                     ::base::BindOnce(&DecodeComponent::drainTask, mWeakThis));
         return C2_OK;
     }
 }
 
-void V4L2DecodeComponent::drainTask() {
+void DecodeComponent::drainTask() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -802,12 +777,12 @@ void V4L2DecodeComponent::drainTask() {
 
     if (!mWorksAtDecoder.empty()) {
         ALOGV("Drain the pending works at the decoder.");
-        mDecoder->drain(::base::BindOnce(&V4L2DecodeComponent::onDrainDone, mWeakThis));
+        mDecoder->drain(::base::BindOnce(&DecodeComponent::onDrainDone, mWeakThis));
         mIsDraining = true;
     }
 }
 
-void V4L2DecodeComponent::onDrainDone(VideoDecoder::DecodeStatus status) {
+void DecodeComponent::onDrainDone(VideoDecoder::DecodeStatus status) {
     ALOGV("%s(status=%s)", __func__, VideoDecoder::DecodeStatusToString(status));
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -827,12 +802,12 @@ void V4L2DecodeComponent::onDrainDone(VideoDecoder::DecodeStatus status) {
         }
 
         mDecoderTaskRunner->PostTask(
-                FROM_HERE, ::base::BindOnce(&V4L2DecodeComponent::pumpPendingWorks, mWeakThis));
+                FROM_HERE, ::base::BindOnce(&DecodeComponent::pumpPendingWorks, mWeakThis));
         return;
     }
 }
 
-void V4L2DecodeComponent::reportError(c2_status_t error) {
+void DecodeComponent::reportError(c2_status_t error) {
     ALOGE("%s(error=%u)", __func__, static_cast<uint32_t>(error));
     ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -846,16 +821,16 @@ void V4L2DecodeComponent::reportError(c2_status_t error) {
     mListener->onError_nb(weak_from_this(), static_cast<uint32_t>(error));
 }
 
-c2_status_t V4L2DecodeComponent::announce_nb(const std::vector<C2WorkOutline>& /* items */) {
+c2_status_t DecodeComponent::announce_nb(const std::vector<C2WorkOutline>& /* items */) {
     return C2_OMITTED;  // Tunneling is not supported by now
 }
 
-std::shared_ptr<C2ComponentInterface> V4L2DecodeComponent::intf() {
+std::shared_ptr<C2ComponentInterface> DecodeComponent::intf() {
     return mIntf;
 }
 
 // static
-const char* V4L2DecodeComponent::ComponentStateToString(ComponentState state) {
+const char* DecodeComponent::ComponentStateToString(ComponentState state) {
     switch (state) {
     case ComponentState::STOPPED:
         return "STOPPED";
diff --git a/components/V4L2DecodeInterface.cpp b/components/DecodeInterface.cpp
similarity index 51%
rename from components/V4L2DecodeInterface.cpp
rename to components/DecodeInterface.cpp
index 32483bea84725d5e3ad2b71f0a1a021136aa55d3..3f0706a265b799eea6f49ca399877a7c58560f0a 100644
--- a/components/V4L2DecodeInterface.cpp
+++ b/components/DecodeInterface.cpp
@@ -1,11 +1,11 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "V4L2DecodeInterface"
+#define LOG_TAG "DecodeInterface"
 
-#include <v4l2_codec2/components/V4L2DecodeInterface.h>
+#include <v4l2_codec2/components/DecodeInterface.h>
 
 #include <C2PlatformSupport.h>
 #include <SimpleC2Interface.h>
@@ -13,8 +13,7 @@
 #include <log/log.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
-#include <v4l2_codec2/common/V4L2ComponentCommon.h>
-#include <v4l2_codec2/common/V4L2Device.h>
+#include <v4l2_codec2/common/Common.h>
 #include <v4l2_codec2/plugin_store/V4L2AllocatorId.h>
 
 namespace android {
@@ -27,20 +26,6 @@ constexpr size_t kInputBufferSizeFor1080p = 1024 * 1024;  // 1MB
 // Input bitstream buffer size for up to 4k streams.
 constexpr size_t kInputBufferSizeFor4K = 4 * kInputBufferSizeFor1080p;
 
-std::optional<VideoCodec> getCodecFromComponentName(const std::string& name) {
-    if (name == V4L2ComponentName::kH264Decoder || name == V4L2ComponentName::kH264SecureDecoder)
-        return VideoCodec::H264;
-    if (name == V4L2ComponentName::kVP8Decoder || name == V4L2ComponentName::kVP8SecureDecoder)
-        return VideoCodec::VP8;
-    if (name == V4L2ComponentName::kVP9Decoder || name == V4L2ComponentName::kVP9SecureDecoder)
-        return VideoCodec::VP9;
-    if (name == V4L2ComponentName::kHEVCDecoder || name == V4L2ComponentName::kHEVCSecureDecoder)
-        return VideoCodec::HEVC;
-
-    ALOGE("Unknown name: %s", name.c_str());
-    return std::nullopt;
-}
-
 size_t calculateInputBufferSize(size_t area) {
     if (area > k4KArea) {
         ALOGW("Input buffer size for video size (%zu) larger than 4K (%zu) might be too small.",
@@ -54,24 +39,29 @@ size_t calculateInputBufferSize(size_t area) {
 }  // namespace
 
 // static
-C2R V4L2DecodeInterface::ProfileLevelSetter(bool /* mayBlock */,
-                                            C2P<C2StreamProfileLevelInfo::input>& info) {
+C2R DecodeInterface::ProfileLevelSetter(bool /* mayBlock */,
+                                        C2P<C2StreamProfileLevelInfo::input>& info) {
     return info.F(info.v.profile)
             .validatePossible(info.v.profile)
             .plus(info.F(info.v.level).validatePossible(info.v.level));
 }
 
 // static
-C2R V4L2DecodeInterface::SizeSetter(bool /* mayBlock */,
-                                    C2P<C2StreamPictureSizeInfo::output>& videoSize) {
+C2R DecodeInterface::SizeSetter(bool /* mayBlock */,
+                                C2P<C2StreamPictureSizeInfo::output>& videoSize) {
     return videoSize.F(videoSize.v.width)
             .validatePossible(videoSize.v.width)
             .plus(videoSize.F(videoSize.v.height).validatePossible(videoSize.v.height));
 }
 
+C2R DecodeInterface::InputSizeSetter(bool /* mayBlock */,
+                                     C2P<C2StreamMaxBufferSizeInfo::input>& inputSize) {
+    return inputSize.F(inputSize.v.value).validatePossible(inputSize.v.value);
+}
+
 // static
 template <typename T>
-C2R V4L2DecodeInterface::DefaultColorAspectsSetter(bool /* mayBlock */, C2P<T>& def) {
+C2R DecodeInterface::DefaultColorAspectsSetter(bool /* mayBlock */, C2P<T>& def) {
     if (def.v.range > C2Color::RANGE_OTHER) {
         def.set().range = C2Color::RANGE_OTHER;
     }
@@ -88,10 +78,10 @@ C2R V4L2DecodeInterface::DefaultColorAspectsSetter(bool /* mayBlock */, C2P<T>&
 }
 
 // static
-C2R V4L2DecodeInterface::MergedColorAspectsSetter(
-        bool /* mayBlock */, C2P<C2StreamColorAspectsInfo::output>& merged,
-        const C2P<C2StreamColorAspectsTuning::output>& def,
-        const C2P<C2StreamColorAspectsInfo::input>& coded) {
+C2R DecodeInterface::MergedColorAspectsSetter(bool /* mayBlock */,
+                                              C2P<C2StreamColorAspectsInfo::output>& merged,
+                                              const C2P<C2StreamColorAspectsTuning::output>& def,
+                                              const C2P<C2StreamColorAspectsInfo::input>& coded) {
     // Take coded values for all specified fields, and default values for unspecified ones.
     merged.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
     merged.set().primaries =
@@ -103,58 +93,123 @@ C2R V4L2DecodeInterface::MergedColorAspectsSetter(
 }
 
 // static
-C2R V4L2DecodeInterface::MaxInputBufferSizeCalculator(
+C2R DecodeInterface::MaxInputBufferSizeCalculator(
         bool /* mayBlock */, C2P<C2StreamMaxBufferSizeInfo::input>& me,
         const C2P<C2StreamPictureSizeInfo::output>& size) {
-    me.set().value = calculateInputBufferSize(size.v.width * size.v.height);
+    size_t calculatedSize = calculateInputBufferSize(size.v.width * size.v.height);
+
+    if (me.v.value < calculatedSize) me.set().value = calculatedSize;
+
     return C2R::Ok();
 }
 
-V4L2DecodeInterface::V4L2DecodeInterface(const std::string& name,
-                                         const std::shared_ptr<C2ReflectorHelper>& helper)
-      : C2InterfaceHelper(helper), mInitStatus(C2_OK) {
+DecodeInterface::DecodeInterface(const std::string& name,
+                                 const std::shared_ptr<C2ReflectorHelper>& helper,
+                                 const SupportedCapabilities& caps)
+      : C2InterfaceHelper(helper), mInitStatus(C2_OK), mVideoCodec(caps.codec) {
     ALOGV("%s(%s)", __func__, name.c_str());
 
     setDerivedInstance(this);
 
-    mVideoCodec = getCodecFromComponentName(name);
-    if (!mVideoCodec) {
-        ALOGE("Invalid component name: %s", name.c_str());
-        mInitStatus = C2_BAD_VALUE;
-        return;
-    }
-
     addParameter(DefineParam(mKind, C2_PARAMKEY_COMPONENT_KIND)
                          .withConstValue(new C2ComponentKindSetting(C2Component::KIND_DECODER))
                          .build());
 
     std::string inputMime;
+
+    ui::Size maxSize(1, 1);
+
+    std::vector<uint32_t> profiles;
+    for (const auto& supportedProfile : caps.supportedProfiles) {
+        if (isValidProfileForCodec(mVideoCodec.value(), supportedProfile.profile)) {
+            profiles.push_back(static_cast<uint32_t>(supportedProfile.profile));
+            maxSize.setWidth(std::max(maxSize.width, supportedProfile.max_resolution.width));
+            maxSize.setHeight(std::max(maxSize.height, supportedProfile.max_resolution.height));
+        }
+    }
+
+    // In case of no supported profiles or an uninitialized device, maxSize is set to a default.
+    if (maxSize == ui::Size(1, 1)) maxSize = ui::Size(4096, 4096);
+
+    if (profiles.empty()) {
+        ALOGW("No supported profiles for codec, using defaults");
+        switch (*mVideoCodec) {  // default values used when the query is not supported
+        case VideoCodec::H264:
+            profiles = {
+                    C2Config::PROFILE_AVC_BASELINE,
+                    C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
+                    C2Config::PROFILE_AVC_MAIN,
+                    C2Config::PROFILE_AVC_HIGH,
+            };
+            break;
+        case VideoCodec::VP8:
+            profiles = {C2Config::PROFILE_VP8_0};
+            break;
+        case VideoCodec::VP9:
+            profiles = {C2Config::PROFILE_VP9_0};
+            break;
+        case VideoCodec::HEVC:
+            profiles = {C2Config::PROFILE_HEVC_MAIN};
+            break;
+        }
+    }
+
+    uint32_t defaultProfile = caps.defaultProfile;
+    if (defaultProfile == C2Config::PROFILE_UNUSED)
+        defaultProfile = *std::min_element(profiles.begin(), profiles.end());
+
+    std::vector<unsigned int> levels;
+    std::vector<C2Config::level_t> supportedLevels = caps.supportedLevels;
+    for (const auto& supportedLevel : supportedLevels) {
+        levels.push_back(static_cast<unsigned int>(supportedLevel));
+    }
+
+    if (levels.empty()) {
+        ALOGE("No supported levels for codec, using defaults");
+        switch (*mVideoCodec) {  // default values used when the query is not supported
+        case VideoCodec::H264:
+            levels = {C2Config::LEVEL_AVC_1,   C2Config::LEVEL_AVC_1B,  C2Config::LEVEL_AVC_1_1,
+                      C2Config::LEVEL_AVC_1_2, C2Config::LEVEL_AVC_1_3, C2Config::LEVEL_AVC_2,
+                      C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2, C2Config::LEVEL_AVC_3,
+                      C2Config::LEVEL_AVC_3_1, C2Config::LEVEL_AVC_3_2, C2Config::LEVEL_AVC_4,
+                      C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2, C2Config::LEVEL_AVC_5,
+                      C2Config::LEVEL_AVC_5_1, C2Config::LEVEL_AVC_5_2};
+            break;
+        case VideoCodec::VP8:
+            levels = {C2Config::LEVEL_UNUSED};
+            break;
+        case VideoCodec::VP9:
+            levels = {C2Config::LEVEL_VP9_1,   C2Config::LEVEL_VP9_1_1, C2Config::LEVEL_VP9_2,
+                      C2Config::LEVEL_VP9_2_1, C2Config::LEVEL_VP9_3,   C2Config::LEVEL_VP9_3_1,
+                      C2Config::LEVEL_VP9_4,   C2Config::LEVEL_VP9_4_1, C2Config::LEVEL_VP9_5};
+            break;
+        case VideoCodec::HEVC:
+            levels = {C2Config::LEVEL_HEVC_MAIN_1,   C2Config::LEVEL_HEVC_MAIN_2,
+                      C2Config::LEVEL_HEVC_MAIN_2_1, C2Config::LEVEL_HEVC_MAIN_3,
+                      C2Config::LEVEL_HEVC_MAIN_3_1, C2Config::LEVEL_HEVC_MAIN_4,
+                      C2Config::LEVEL_HEVC_MAIN_4_1, C2Config::LEVEL_HEVC_MAIN_5,
+                      C2Config::LEVEL_HEVC_MAIN_5_1, C2Config::LEVEL_HEVC_MAIN_5_2,
+                      C2Config::LEVEL_HEVC_MAIN_6,   C2Config::LEVEL_HEVC_MAIN_6_1,
+                      C2Config::LEVEL_HEVC_MAIN_6_2};
+            break;
+        }
+    }
+
+    uint32_t defaultLevel = caps.defaultLevel;
+    if (defaultLevel == C2Config::LEVEL_UNUSED)
+        defaultLevel = *std::min_element(levels.begin(), levels.end());
+
     switch (*mVideoCodec) {
     case VideoCodec::H264:
         inputMime = MEDIA_MIMETYPE_VIDEO_AVC;
-        addParameter(
-                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-                        .withDefault(new C2StreamProfileLevelInfo::input(
-                                0u, C2Config::PROFILE_AVC_MAIN, C2Config::LEVEL_AVC_4))
-                        .withFields(
-                                {C2F(mProfileLevel, profile)
-                                         .oneOf({C2Config::PROFILE_AVC_BASELINE,
-                                                 C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
-                                                 C2Config::PROFILE_AVC_MAIN,
-                                                 C2Config::PROFILE_AVC_HIGH,
-                                                 C2Config::PROFILE_AVC_CONSTRAINED_HIGH}),
-                                 C2F(mProfileLevel, level)
-                                         .oneOf({C2Config::LEVEL_AVC_1, C2Config::LEVEL_AVC_1B,
-                                                 C2Config::LEVEL_AVC_1_1, C2Config::LEVEL_AVC_1_2,
-                                                 C2Config::LEVEL_AVC_1_3, C2Config::LEVEL_AVC_2,
-                                                 C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2,
-                                                 C2Config::LEVEL_AVC_3, C2Config::LEVEL_AVC_3_1,
-                                                 C2Config::LEVEL_AVC_3_2, C2Config::LEVEL_AVC_4,
-                                                 C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2,
-                                                 C2Config::LEVEL_AVC_5, C2Config::LEVEL_AVC_5_1,
-                                                 C2Config::LEVEL_AVC_5_2})})
-                        .withSetter(ProfileLevelSetter)
-                        .build());
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, static_cast<C2Config::profile_t>(defaultProfile),
+                                     static_cast<C2Config::level_t>(defaultLevel)))
+                             .withFields({C2F(mProfileLevel, profile).oneOf(profiles),
+                                          C2F(mProfileLevel, level).oneOf(levels)})
+                             .withSetter(ProfileLevelSetter)
+                             .build());
         break;
 
     case VideoCodec::VP8:
@@ -167,48 +222,26 @@ V4L2DecodeInterface::V4L2DecodeInterface(const std::string& name,
 
     case VideoCodec::VP9:
         inputMime = MEDIA_MIMETYPE_VIDEO_VP9;
-        addParameter(
-                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-                        .withDefault(new C2StreamProfileLevelInfo::input(
-                                0u, C2Config::PROFILE_VP9_0, C2Config::LEVEL_VP9_5))
-                        .withFields({C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_VP9_0}),
-                                     C2F(mProfileLevel, level)
-                                             .oneOf({C2Config::LEVEL_VP9_1, C2Config::LEVEL_VP9_1_1,
-                                                     C2Config::LEVEL_VP9_2, C2Config::LEVEL_VP9_2_1,
-                                                     C2Config::LEVEL_VP9_3, C2Config::LEVEL_VP9_3_1,
-                                                     C2Config::LEVEL_VP9_4, C2Config::LEVEL_VP9_4_1,
-                                                     C2Config::LEVEL_VP9_5})})
-                        .withSetter(ProfileLevelSetter)
-                        .build());
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, static_cast<C2Config::profile_t>(defaultProfile),
+                                     static_cast<C2Config::level_t>(defaultLevel)))
+                             .withFields({C2F(mProfileLevel, profile).oneOf(profiles),
+                                          C2F(mProfileLevel, level).oneOf(levels)})
+                             .withSetter(ProfileLevelSetter)
+                             .build());
         break;
 
     case VideoCodec::HEVC:
         inputMime = MEDIA_MIMETYPE_VIDEO_HEVC;
-        addParameter(
-                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-                        .withDefault(new C2StreamProfileLevelInfo::input(
-                                0u, C2Config::PROFILE_HEVC_MAIN, C2Config::LEVEL_HEVC_MAIN_5_1))
-                        .withFields({C2F(mProfileLevel, profile)
-                                             .oneOf({C2Config::PROFILE_HEVC_MAIN,
-                                                     C2Config::PROFILE_HEVC_MAIN_STILL}),
-                                     C2F(mProfileLevel, level)
-                                             .oneOf({C2Config::LEVEL_HEVC_MAIN_1,
-                                                     C2Config::LEVEL_HEVC_MAIN_2,
-                                                     C2Config::LEVEL_HEVC_MAIN_2_1,
-                                                     C2Config::LEVEL_HEVC_MAIN_3,
-                                                     C2Config::LEVEL_HEVC_MAIN_3_1,
-                                                     C2Config::LEVEL_HEVC_MAIN_4,
-                                                     C2Config::LEVEL_HEVC_MAIN_4_1,
-                                                     C2Config::LEVEL_HEVC_MAIN_5,
-                                                     C2Config::LEVEL_HEVC_MAIN_5_1,
-                                                     C2Config::LEVEL_HEVC_MAIN_5_2,
-                                                     C2Config::LEVEL_HEVC_HIGH_4,
-                                                     C2Config::LEVEL_HEVC_HIGH_4_1,
-                                                     C2Config::LEVEL_HEVC_HIGH_5,
-                                                     C2Config::LEVEL_HEVC_HIGH_5_1,
-                                                     C2Config::LEVEL_HEVC_HIGH_5_2})})
-                        .withSetter(ProfileLevelSetter)
-                        .build());
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, static_cast<C2Config::profile_t>(defaultProfile),
+                                     static_cast<C2Config::level_t>(defaultLevel)))
+                             .withFields({C2F(mProfileLevel, profile).oneOf(profiles),
+                                          C2F(mProfileLevel, level).oneOf(levels)})
+                             .withSetter(ProfileLevelSetter)
+                             .build());
         break;
     }
 
@@ -232,6 +265,17 @@ V4L2DecodeInterface::V4L2DecodeInterface(const std::string& name,
                     .withConstValue(new C2PortDelayTuning::output(getOutputDelay(*mVideoCodec)))
                     .build());
 
+    // This value is set according to the relation between kNumInputBuffers = 16 and the current
+    // codec2 framework implementation. Specifically, this generally limits the framework to using
+    // <= 16 input buffers, although certain timing of events can result in a few more input buffers
+    // being allocated but rarely used. This lets us avoid remapping v4l2 input buffers and DMA
+    // buffers in the common case. We could go up to 4 here, to limit the framework to
+    // simultaneously enqueuing 16 input buffers, but there doesn't seem to be much of a
+    // performance improvement from that.
+    addParameter(DefineParam(mPipelineDelay, C2_PARAMKEY_PIPELINE_DELAY)
+                         .withConstValue(new C2PipelineDelayTuning(3))
+                         .build());
+
     addParameter(DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
                          .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
                                  inputMime.c_str()))
@@ -246,10 +290,11 @@ V4L2DecodeInterface::V4L2DecodeInterface(const std::string& name,
     // In order to fasten the bootup time, we use the maximum supported size instead of querying the
     // capability from the V4L2 device.
     addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
-                         .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                         .withDefault(new C2StreamPictureSizeInfo::output(
+                                 0u, std::min(320, maxSize.width), std::min(240, maxSize.height)))
                          .withFields({
-                                 C2F(mSize, width).inRange(16, 4096, 16),
-                                 C2F(mSize, height).inRange(16, 4096, 16),
+                                 C2F(mSize, width).inRange(16, maxSize.width, 16),
+                                 C2F(mSize, height).inRange(16, maxSize.height, 16),
                          })
                          .withSetter(SizeSetter)
                          .build());
@@ -260,6 +305,7 @@ V4L2DecodeInterface::V4L2DecodeInterface(const std::string& name,
                     .withFields({
                             C2F(mMaxInputSize, value).any(),
                     })
+                    .withSetter(InputSizeSetter)
                     .calculatedAs(MaxInputBufferSizeCalculator, mSize)
                     .build());
 
@@ -332,31 +378,36 @@ V4L2DecodeInterface::V4L2DecodeInterface(const std::string& name,
                     .withSetter(DefaultColorAspectsSetter)
                     .build());
 
-    addParameter(
-            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
-                    .withDefault(new C2StreamColorAspectsInfo::output(
-                            0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
-                            C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
-                    .withFields(
-                            {C2F(mColorAspects, range)
-                                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
-                             C2F(mColorAspects, primaries)
-                                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
-                                              C2Color::PRIMARIES_OTHER),
-                             C2F(mColorAspects, transfer)
-                                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
-                                              C2Color::TRANSFER_OTHER),
-                             C2F(mColorAspects, matrix)
-                                     .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
-                    .withSetter(MergedColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
-                    .build());
+    // At this moment v4l2_codec2 supports decoding this information only for
+    // unprotected H264 and both protected and unprotected HEVC.
+    if ((mVideoCodec == VideoCodec::H264 && !secureMode) || mVideoCodec == VideoCodec::HEVC) {
+        addParameter(DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(MergedColorAspectsSetter, mDefaultColorAspects,
+                                         mCodedColorAspects)
+                             .build());
+    }
 }
 
-size_t V4L2DecodeInterface::getInputBufferSize() const {
-    return calculateInputBufferSize(mSize->width * mSize->height);
+size_t DecodeInterface::getInputBufferSize() const {
+    return mMaxInputSize->value;
 }
 
-c2_status_t V4L2DecodeInterface::queryColorAspects(
+c2_status_t DecodeInterface::queryColorAspects(
         std::shared_ptr<C2StreamColorAspectsInfo::output>* targetColorAspects) {
     std::unique_ptr<C2StreamColorAspectsInfo::output> colorAspects =
             std::make_unique<C2StreamColorAspectsInfo::output>(
@@ -369,7 +420,7 @@ c2_status_t V4L2DecodeInterface::queryColorAspects(
     return status;
 }
 
-uint32_t V4L2DecodeInterface::getOutputDelay(VideoCodec codec) {
+uint32_t DecodeInterface::getOutputDelay(VideoCodec codec) {
     switch (codec) {
     case VideoCodec::H264:
         // Due to frame reordering an H264 decoder might need multiple additional input frames to be
@@ -380,9 +431,17 @@ uint32_t V4L2DecodeInterface::getOutputDelay(VideoCodec codec) {
     case VideoCodec::HEVC:
         return 16;
     case VideoCodec::VP8:
-        return 0;
+        // The decoder might hold a few frames as references for decoding. Since Android T,
+        // the Codec2 framework is more prone to time out the component if it is not producing
+        // frames. This might especially occur when those frames are held for reference and
+        // playback/decoding is paused. With an increased output delay we inform Codec2 not to
+        // time out the component if the number of frames held in the component is less than the
+        // maximum number of reference frames that could be held by the decoder.
+        // Reference: RFC 6386 Section 3. Compressed Frame Types
+        return 3;
     case VideoCodec::VP9:
-        return 0;
+        // Reference: https://www.webmproject.org/vp9/levels/
+        return 8;
     }
 }
 
diff --git a/components/V4L2EncodeComponent.cpp b/components/EncodeComponent.cpp
similarity index 77%
rename from components/V4L2EncodeComponent.cpp
rename to components/EncodeComponent.cpp
index b266a6e65e7dc8bc9d67faac1e64aad93cb43453..0c7d0445109339110e4b00427f42c440890916ea 100644
--- a/components/V4L2EncodeComponent.cpp
+++ b/components/EncodeComponent.cpp
@@ -1,11 +1,11 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
+// found in the LICENSE file.
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "V4L2EncodeComponent"
+#define LOG_TAG "EncodeComponent"
 
-#include <v4l2_codec2/components/V4L2EncodeComponent.h>
+#include <v4l2_codec2/components/EncodeComponent.h>
 
 #include <inttypes.h>
 
@@ -18,30 +18,36 @@
 #include <android/hardware/graphics/common/1.0/types.h>
 #include <base/bind.h>
 #include <base/bind_helpers.h>
-#include <cutils/properties.h>
 #include <log/log.h>
 #include <media/stagefright/MediaDefs.h>
 #include <ui/GraphicBuffer.h>
 #include <ui/Size.h>
 
-#include <v4l2_codec2/common/Common.h>
 #include <v4l2_codec2/common/EncodeHelpers.h>
 #include <v4l2_codec2/common/FormatConverter.h>
-#include <v4l2_codec2/common/VideoPixelFormat.h>
 #include <v4l2_codec2/components/BitstreamBuffer.h>
-#include <v4l2_codec2/components/V4L2EncodeInterface.h>
-#include <v4l2_codec2/components/V4L2Encoder.h>
+#include <v4l2_codec2/components/EncodeInterface.h>
+#include <v4l2_codec2/components/VideoEncoder.h>
 
 using android::hardware::graphics::common::V1_0::BufferUsage;
 
 namespace android {
 
 namespace {
+// Create an input frame from the specified graphic block.
+std::unique_ptr<VideoEncoder::InputFrame> createInputFrame(
+        const C2ConstGraphicBlock& block, VideoPixelFormat format,
+        const std::vector<VideoFramePlane>& planes, uint64_t index, int64_t timestamp) {
+    std::vector<int> fds;
+    const C2Handle* const handle = block.handle();
+    for (int i = 0; i < handle->numFds; i++) {
+        fds.emplace_back(handle->data[i]);
+    }
 
-const VideoPixelFormat kInputPixelFormat = VideoPixelFormat::NV12;
-
-// The peak bitrate in function of the target bitrate, used when the bitrate mode is VBR.
-constexpr uint32_t kPeakBitrateMultiplier = 2u;
+    return std::make_unique<VideoEncoder::InputFrame>(std::move(fds), planes, format, index,
+                                                      timestamp);
+}
+}  // namespace
 
 // Get the video frame layout from the specified |inputBlock|.
 // TODO(dstaessens): Clean up code extracting layout from a C2GraphicBlock.
@@ -58,7 +64,7 @@ std::optional<std::vector<VideoFramePlane>> getVideoFrameLayout(const C2ConstGra
     // IMPLEMENTATION_DEFINED and its backed format is RGB. We fill the layout by using
     // ImplDefinedToRGBXMap in the case.
     if (layout.type == C2PlanarLayout::TYPE_UNKNOWN) {
-        std::unique_ptr<ImplDefinedToRGBXMap> idMap = ImplDefinedToRGBXMap::Create(block);
+        std::unique_ptr<ImplDefinedToRGBXMap> idMap = ImplDefinedToRGBXMap::create(block);
         if (idMap == nullptr) {
             ALOGE("Unable to parse RGBX_8888 from IMPLEMENTATION_DEFINED");
             return std::nullopt;
@@ -169,94 +175,35 @@ std::optional<uint32_t> getVideoFrameStride(VideoPixelFormat format, ui::Size si
     return planes.value()[0].mStride;
 }
 
-// Create an input frame from the specified graphic block.
-std::unique_ptr<V4L2Encoder::InputFrame> CreateInputFrame(const C2ConstGraphicBlock& block,
-                                                          uint64_t index, int64_t timestamp) {
-    VideoPixelFormat format;
-    std::optional<std::vector<VideoFramePlane>> planes = getVideoFrameLayout(block, &format);
-    if (!planes) {
-        ALOGE("Failed to get input block's layout");
-        return nullptr;
-    }
-
-    std::vector<int> fds;
-    const C2Handle* const handle = block.handle();
-    for (int i = 0; i < handle->numFds; i++) {
-        fds.emplace_back(handle->data[i]);
-    }
-
-    return std::make_unique<V4L2Encoder::InputFrame>(std::move(fds), std::move(planes.value()),
-                                                     format, index, timestamp);
-}
-
-// Check whether the specified |profile| is an H.264 profile.
-bool IsH264Profile(C2Config::profile_t profile) {
-    return (profile >= C2Config::PROFILE_AVC_BASELINE &&
-            profile <= C2Config::PROFILE_AVC_ENHANCED_MULTIVIEW_DEPTH_HIGH);
-}
-
-}  // namespace
-
-// static
-std::atomic<int32_t> V4L2EncodeComponent::sConcurrentInstances = 0;
-
-// static
-std::shared_ptr<C2Component> V4L2EncodeComponent::create(
-        C2String name, c2_node_id_t id, std::shared_ptr<C2ReflectorHelper> helper,
-        C2ComponentFactory::ComponentDeleter deleter) {
-    ALOGV("%s(%s)", __func__, name.c_str());
-
-    static const int32_t kMaxConcurrentInstances =
-            property_get_int32("ro.vendor.v4l2_codec2.encode_concurrent_instances", -1);
-
-    static std::mutex mutex;
-    std::lock_guard<std::mutex> lock(mutex);
-    if (kMaxConcurrentInstances >= 0 && sConcurrentInstances.load() >= kMaxConcurrentInstances) {
-        ALOGW("Cannot create additional encoder, maximum number of instances reached: %d",
-              kMaxConcurrentInstances);
-        return nullptr;
-    }
-
-    auto interface = std::make_shared<V4L2EncodeInterface>(name, std::move(helper));
-    if (interface->status() != C2_OK) {
-        ALOGE("Component interface initialization failed (error code %d)", interface->status());
-        return nullptr;
-    }
-
-    return std::shared_ptr<C2Component>(new V4L2EncodeComponent(name, id, std::move(interface)),
-                                        deleter);
-}
-
-V4L2EncodeComponent::V4L2EncodeComponent(C2String name, c2_node_id_t id,
-                                         std::shared_ptr<V4L2EncodeInterface> interface)
+EncodeComponent::EncodeComponent(C2String name, c2_node_id_t id,
+                                 std::shared_ptr<EncodeInterface> interface)
       : mName(name),
         mId(id),
         mInterface(std::move(interface)),
         mComponentState(ComponentState::LOADED) {
     ALOGV("%s(%s)", __func__, name.c_str());
-
-    sConcurrentInstances.fetch_add(1, std::memory_order_relaxed);
 }
 
-V4L2EncodeComponent::~V4L2EncodeComponent() {
+EncodeComponent::~EncodeComponent() {
     ALOGV("%s()", __func__);
 
     // Stop encoder thread and invalidate pointers if component wasn't stopped before destroying.
-    if (mEncoderThread.IsRunning()) {
+    if (mEncoderThread.IsRunning() && !mEncoderTaskRunner->RunsTasksInCurrentSequence()) {
         mEncoderTaskRunner->PostTask(
                 FROM_HERE, ::base::BindOnce(
-                                   [](::base::WeakPtrFactory<V4L2EncodeComponent>* weakPtrFactory) {
+                                   [](::base::WeakPtrFactory<EncodeComponent>* weakPtrFactory,
+                                      std::unique_ptr<VideoEncoder>* encoder) {
                                        weakPtrFactory->InvalidateWeakPtrs();
+                                       encoder->reset();
                                    },
-                                   &mWeakThisFactory));
+                                   &mWeakThisFactory, &mEncoder));
         mEncoderThread.Stop();
     }
 
-    sConcurrentInstances.fetch_sub(1, std::memory_order_relaxed);
     ALOGV("%s(): done", __func__);
 }
 
-c2_status_t V4L2EncodeComponent::start() {
+c2_status_t EncodeComponent::start() {
     ALOGV("%s()", __func__);
 
     // Lock while starting, to synchronize start/stop/reset/release calls.
@@ -278,7 +225,7 @@ c2_status_t V4L2EncodeComponent::start() {
     ::base::WaitableEvent done;
     bool success = false;
     mEncoderTaskRunner->PostTask(
-            FROM_HERE, ::base::Bind(&V4L2EncodeComponent::startTask, mWeakThis, &success, &done));
+            FROM_HERE, ::base::Bind(&EncodeComponent::startTask, mWeakThis, &success, &done));
     done.Wait();
 
     if (!success) {
@@ -290,7 +237,7 @@ c2_status_t V4L2EncodeComponent::start() {
     return C2_OK;
 }
 
-c2_status_t V4L2EncodeComponent::stop() {
+c2_status_t EncodeComponent::stop() {
     ALOGV("%s()", __func__);
 
     // Lock while stopping, to synchronize start/stop/reset/release calls.
@@ -307,8 +254,8 @@ c2_status_t V4L2EncodeComponent::stop() {
 
     // Wait for the component to stop.
     ::base::WaitableEvent done;
-    mEncoderTaskRunner->PostTask(
-            FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::stopTask, mWeakThis, &done));
+    mEncoderTaskRunner->PostTask(FROM_HERE,
+                                 ::base::BindOnce(&EncodeComponent::stopTask, mWeakThis, &done));
     done.Wait();
     mEncoderThread.Stop();
 
@@ -318,7 +265,7 @@ c2_status_t V4L2EncodeComponent::stop() {
     return C2_OK;
 }
 
-c2_status_t V4L2EncodeComponent::reset() {
+c2_status_t EncodeComponent::reset() {
     ALOGV("%s()", __func__);
 
     // The interface specification says: "This method MUST be supported in all (including tripped)
@@ -333,7 +280,7 @@ c2_status_t V4L2EncodeComponent::reset() {
     return C2_OK;
 }
 
-c2_status_t V4L2EncodeComponent::release() {
+c2_status_t EncodeComponent::release() {
     ALOGV("%s()", __func__);
 
     // The interface specification says: "This method MUST be supported in stopped state.", but the
@@ -344,7 +291,7 @@ c2_status_t V4L2EncodeComponent::release() {
     return C2_OK;
 }
 
-c2_status_t V4L2EncodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* const items) {
+c2_status_t EncodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* const items) {
     ALOGV("%s()", __func__);
 
     if (mComponentState != ComponentState::RUNNING) {
@@ -354,7 +301,7 @@ c2_status_t V4L2EncodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* co
 
     while (!items->empty()) {
         mEncoderTaskRunner->PostTask(FROM_HERE,
-                                     ::base::BindOnce(&V4L2EncodeComponent::queueTask, mWeakThis,
+                                     ::base::BindOnce(&EncodeComponent::queueTask, mWeakThis,
                                                       std::move(items->front())));
         items->pop_front();
     }
@@ -362,7 +309,7 @@ c2_status_t V4L2EncodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* co
     return C2_OK;
 }
 
-c2_status_t V4L2EncodeComponent::drain_nb(drain_mode_t mode) {
+c2_status_t EncodeComponent::drain_nb(drain_mode_t mode) {
     ALOGV("%s()", __func__);
 
     if (mode == DRAIN_CHAIN) {
@@ -373,13 +320,13 @@ c2_status_t V4L2EncodeComponent::drain_nb(drain_mode_t mode) {
         return C2_BAD_STATE;
     }
 
-    mEncoderTaskRunner->PostTask(
-            FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::drainTask, mWeakThis, mode));
+    mEncoderTaskRunner->PostTask(FROM_HERE,
+                                 ::base::BindOnce(&EncodeComponent::drainTask, mWeakThis, mode));
     return C2_OK;
 }
 
-c2_status_t V4L2EncodeComponent::flush_sm(flush_mode_t mode,
-                                          std::list<std::unique_ptr<C2Work>>* const flushedWork) {
+c2_status_t EncodeComponent::flush_sm(flush_mode_t mode,
+                                      std::list<std::unique_ptr<C2Work>>* const flushedWork) {
     ALOGV("%s()", __func__);
 
     if (mode != FLUSH_COMPONENT) {
@@ -395,19 +342,19 @@ c2_status_t V4L2EncodeComponent::flush_sm(flush_mode_t mode,
     // immediately abandon all non-started work on the encoder thread. We can return all work that
     // can't be immediately discarded using onWorkDone() later.
     ::base::WaitableEvent done;
-    mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::flushTask,
-                                                             mWeakThis, &done, flushedWork));
+    mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&EncodeComponent::flushTask, mWeakThis,
+                                                             &done, flushedWork));
     done.Wait();
 
     return C2_OK;
 }
 
-c2_status_t V4L2EncodeComponent::announce_nb(const std::vector<C2WorkOutline>& items) {
+c2_status_t EncodeComponent::announce_nb(const std::vector<C2WorkOutline>& items) {
     return C2_OMITTED;  // Tunneling is not supported by now
 }
 
-c2_status_t V4L2EncodeComponent::setListener_vb(const std::shared_ptr<Listener>& listener,
-                                                c2_blocking_t mayBlock) {
+c2_status_t EncodeComponent::setListener_vb(const std::shared_ptr<Listener>& listener,
+                                            c2_blocking_t mayBlock) {
     ALOG_ASSERT(mComponentState != ComponentState::UNLOADED);
 
     // Lock so we're sure the component isn't currently starting or stopping.
@@ -425,18 +372,18 @@ c2_status_t V4L2EncodeComponent::setListener_vb(const std::shared_ptr<Listener>&
     ALOG_ASSERT(mayBlock == c2_blocking_t::C2_MAY_BLOCK);
 
     ::base::WaitableEvent done;
-    mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::setListenerTask,
+    mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&EncodeComponent::setListenerTask,
                                                              mWeakThis, listener, &done));
     done.Wait();
 
     return C2_OK;
 }
 
-std::shared_ptr<C2ComponentInterface> V4L2EncodeComponent::intf() {
-    return std::make_shared<SimpleInterface<V4L2EncodeInterface>>(mName.c_str(), mId, mInterface);
+std::shared_ptr<C2ComponentInterface> EncodeComponent::intf() {
+    return std::make_shared<SimpleInterface<EncodeInterface>>(mName.c_str(), mId, mInterface);
 }
 
-void V4L2EncodeComponent::startTask(bool* success, ::base::WaitableEvent* done) {
+void EncodeComponent::startTask(bool* success, ::base::WaitableEvent* done) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -444,7 +391,7 @@ void V4L2EncodeComponent::startTask(bool* success, ::base::WaitableEvent* done)
     done->Signal();
 }
 
-void V4L2EncodeComponent::stopTask(::base::WaitableEvent* done) {
+void EncodeComponent::stopTask(::base::WaitableEvent* done) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -452,6 +399,8 @@ void V4L2EncodeComponent::stopTask(::base::WaitableEvent* done) {
     flush();
 
     mInputFormatConverter.reset();
+    mInputPixelFormat = VideoPixelFormat::UNKNOWN;
+    mInputLayout.clear();
 
     mEncoder.reset();
     mOutputBlockPool.reset();
@@ -462,7 +411,7 @@ void V4L2EncodeComponent::stopTask(::base::WaitableEvent* done) {
     done->Signal();
 }
 
-void V4L2EncodeComponent::queueTask(std::unique_ptr<C2Work> work) {
+void EncodeComponent::queueTask(std::unique_ptr<C2Work> work) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
     ALOG_ASSERT(mEncoder);
@@ -496,6 +445,31 @@ void V4L2EncodeComponent::queueTask(std::unique_ptr<C2Work> work) {
         return;
     }
 
+    // If this is the first input frame, create an input format converter if the V4L2 device doesn't
+    // support the requested input format.
+    if ((mInputPixelFormat == VideoPixelFormat::UNKNOWN) && !work->input.buffers.empty()) {
+        VideoPixelFormat format = VideoPixelFormat::UNKNOWN;
+        if (!getVideoFrameLayout(work->input.buffers.front()->data().graphicBlocks().front(),
+                                 &format)) {
+            ALOGE("Failed to get input block's layout");
+            reportError(C2_CORRUPTED);
+            return;
+        }
+        if (mEncoder->inputFormat() != format) {
+            ALOG_ASSERT(!mInputFormatConverter);
+            ALOGV("Creating input format convertor (%s)",
+                  videoPixelFormatToString(mEncoder->inputFormat()).c_str());
+            mInputFormatConverter =
+                    FormatConverter::create(mEncoder->inputFormat(), mEncoder->visibleSize(),
+                                            VideoEncoder::kInputBufferCount, mEncoder->codedSize());
+            if (!mInputFormatConverter) {
+                ALOGE("Failed to created input format convertor");
+                reportError(C2_CORRUPTED);
+                return;
+            }
+        }
+    }
+
     // If conversion is required but no free buffers are available we queue the work item.
     if (mInputFormatConverter && !mInputFormatConverter->isReady()) {
         ALOGV("Input format convertor ran out of buffers");
@@ -510,8 +484,8 @@ void V4L2EncodeComponent::queueTask(std::unique_ptr<C2Work> work) {
                 work->input.buffers.front()->data().graphicBlocks().front();
         if (mInputFormatConverter) {
             ALOGV("Converting input block (index: %" PRIu64 ")", index);
-            c2_status_t status = C2_CORRUPTED;
-            inputBlock = mInputFormatConverter->convertBlock(index, inputBlock, &status);
+            c2_status_t status =
+                    mInputFormatConverter->convertBlock(index, inputBlock, &inputBlock);
             if (status != C2_OK) {
                 ALOGE("Failed to convert input block (index: %" PRIu64 ")", index);
                 reportError(status);
@@ -555,7 +529,7 @@ void V4L2EncodeComponent::queueTask(std::unique_ptr<C2Work> work) {
     }
 }
 
-void V4L2EncodeComponent::drainTask(drain_mode_t /*drainMode*/) {
+void EncodeComponent::drainTask(drain_mode_t /*drainMode*/) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -579,7 +553,7 @@ void V4L2EncodeComponent::drainTask(drain_mode_t /*drainMode*/) {
     }
 }
 
-void V4L2EncodeComponent::onDrainDone(bool success) {
+void EncodeComponent::onDrainDone(bool success) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
     ALOG_ASSERT(!mWorkQueue.empty());
@@ -618,8 +592,8 @@ void V4L2EncodeComponent::onDrainDone(bool success) {
     mWorkQueue.pop_front();
 }
 
-void V4L2EncodeComponent::flushTask(::base::WaitableEvent* done,
-                                    std::list<std::unique_ptr<C2Work>>* const flushedWork) {
+void EncodeComponent::flushTask(::base::WaitableEvent* done,
+                                std::list<std::unique_ptr<C2Work>>* const flushedWork) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -637,8 +611,8 @@ void V4L2EncodeComponent::flushTask(::base::WaitableEvent* done,
     flush();
 }
 
-void V4L2EncodeComponent::setListenerTask(const std::shared_ptr<Listener>& listener,
-                                          ::base::WaitableEvent* done) {
+void EncodeComponent::setListenerTask(const std::shared_ptr<Listener>& listener,
+                                      ::base::WaitableEvent* done) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -646,76 +620,7 @@ void V4L2EncodeComponent::setListenerTask(const std::shared_ptr<Listener>& liste
     done->Signal();
 }
 
-bool V4L2EncodeComponent::initializeEncoder() {
-    ALOGV("%s()", __func__);
-    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
-    ALOG_ASSERT(!mInputFormatConverter);
-    ALOG_ASSERT(!mEncoder);
-
-    mLastFrameTime = std::nullopt;
-
-    // Get the requested profile and level.
-    C2Config::profile_t outputProfile = mInterface->getOutputProfile();
-
-    // CSD only needs to be extracted when using an H.264 profile.
-    mExtractCSD = IsH264Profile(outputProfile);
-
-    std::optional<uint8_t> h264Level;
-    if (IsH264Profile(outputProfile)) {
-        h264Level = c2LevelToV4L2Level(mInterface->getOutputLevel());
-    }
-
-    // Get the stride used by the C2 framework, as this might be different from the stride used by
-    // the V4L2 encoder.
-    std::optional<uint32_t> stride =
-            getVideoFrameStride(kInputPixelFormat, mInterface->getInputVisibleSize());
-    if (!stride) {
-        ALOGE("Failed to get video frame stride");
-        reportError(C2_CORRUPTED);
-        return false;
-    }
-
-    // Get the requested bitrate mode and bitrate. The C2 framework doesn't offer a parameter to
-    // configure the peak bitrate, so we use a multiple of the target bitrate.
-    mBitrateMode = mInterface->getBitrateMode();
-    if (property_get_bool("persist.vendor.v4l2_codec2.disable_vbr", false)) {
-        // NOTE: This is a workaround for b/235771157.
-        ALOGW("VBR is disabled on this device");
-        mBitrateMode = C2Config::BITRATE_CONST;
-    }
-
-    mBitrate = mInterface->getBitrate();
-
-    mEncoder = V4L2Encoder::create(
-            outputProfile, h264Level, mInterface->getInputVisibleSize(), *stride,
-            mInterface->getKeyFramePeriod(), mBitrateMode, mBitrate,
-            mBitrate * kPeakBitrateMultiplier,
-            ::base::BindRepeating(&V4L2EncodeComponent::fetchOutputBlock, mWeakThis),
-            ::base::BindRepeating(&V4L2EncodeComponent::onInputBufferDone, mWeakThis),
-            ::base::BindRepeating(&V4L2EncodeComponent::onOutputBufferDone, mWeakThis),
-            ::base::BindRepeating(&V4L2EncodeComponent::onDrainDone, mWeakThis),
-            ::base::BindRepeating(&V4L2EncodeComponent::reportError, mWeakThis, C2_CORRUPTED),
-            mEncoderTaskRunner);
-    if (!mEncoder) {
-        ALOGE("Failed to create V4L2Encoder (profile: %s)", profileToString(outputProfile));
-        return false;
-    }
-
-    // Add an input format convertor if the device doesn't support the requested input format.
-    ALOGV("Creating input format convertor (%s)",
-          videoPixelFormatToString(mEncoder->inputFormat()).c_str());
-    mInputFormatConverter =
-            FormatConverter::Create(mEncoder->inputFormat(), mEncoder->visibleSize(),
-                                    V4L2Encoder::kInputBufferCount, mEncoder->codedSize());
-    if (!mInputFormatConverter) {
-        ALOGE("Failed to created input format convertor");
-        return false;
-    }
-
-    return true;
-}
-
-bool V4L2EncodeComponent::updateEncodingParameters() {
+bool EncodeComponent::updateEncodingParameters() {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -733,10 +638,10 @@ bool V4L2EncodeComponent::updateEncodingParameters() {
         mBitrate = bitrate;
 
         if (mBitrateMode == C2Config::BITRATE_VARIABLE) {
-            ALOGV("Setting peak bitrate to %u", bitrate * kPeakBitrateMultiplier);
+            ALOGV("Setting peak bitrate to %u", bitrate * VideoEncoder::kPeakBitrateMultiplier);
             // TODO(b/190336806): Our stack doesn't support dynamic peak bitrate changes yet, ignore
             // errors for now.
-            mEncoder->setPeakBitrate(bitrate * kPeakBitrateMultiplier);
+            mEncoder->setPeakBitrate(bitrate * VideoEncoder::kPeakBitrateMultiplier);
         }
     }
 
@@ -777,7 +682,7 @@ bool V4L2EncodeComponent::updateEncodingParameters() {
     return true;
 }
 
-bool V4L2EncodeComponent::encode(C2ConstGraphicBlock block, uint64_t index, int64_t timestamp) {
+bool EncodeComponent::encode(C2ConstGraphicBlock block, uint64_t index, int64_t timestamp) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
     ALOG_ASSERT(mEncoder);
@@ -785,6 +690,21 @@ bool V4L2EncodeComponent::encode(C2ConstGraphicBlock block, uint64_t index, int6
     ALOGV("Encoding input block (index: %" PRIu64 ", timestamp: %" PRId64 ", size: %dx%d)", index,
           timestamp, block.width(), block.height());
 
+    // If this is the first input frame, determine the pixel format and layout.
+    if (mInputPixelFormat == VideoPixelFormat::UNKNOWN) {
+        ALOG_ASSERT(mInputLayout.empty());
+        VideoPixelFormat format = VideoPixelFormat::UNKNOWN;
+        std::optional<std::vector<VideoFramePlane>> inputLayout =
+                getVideoFrameLayout(block, &format);
+        if (!inputLayout) {
+            ALOGE("Failed to get input block's layout");
+            reportError(C2_CORRUPTED);
+            return false;
+        }
+        mInputPixelFormat = format;
+        mInputLayout = std::move(*inputLayout);
+    }
+
     // Dynamically adjust framerate based on the frame's timestamp if required.
     constexpr int64_t kMaxFramerateDiff = 5;
     if (mLastFrameTime && (timestamp > *mLastFrameTime)) {
@@ -802,7 +722,8 @@ bool V4L2EncodeComponent::encode(C2ConstGraphicBlock block, uint64_t index, int6
     if (!updateEncodingParameters()) return false;
 
     // Create an input frame from the graphic block.
-    std::unique_ptr<V4L2Encoder::InputFrame> frame = CreateInputFrame(block, index, timestamp);
+    std::unique_ptr<VideoEncoder::InputFrame> frame =
+            createInputFrame(block, mInputPixelFormat, mInputLayout, index, timestamp);
     if (!frame) {
         ALOGE("Failed to create video frame from input block (index: %" PRIu64
               ", timestamp: %" PRId64 ")",
@@ -818,7 +739,7 @@ bool V4L2EncodeComponent::encode(C2ConstGraphicBlock block, uint64_t index, int6
     return true;
 }
 
-void V4L2EncodeComponent::flush() {
+void EncodeComponent::flush() {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -849,8 +770,7 @@ void V4L2EncodeComponent::flush() {
     }
 }
 
-void V4L2EncodeComponent::fetchOutputBlock(uint32_t size,
-                                           std::unique_ptr<BitstreamBuffer>* buffer) {
+void EncodeComponent::fetchOutputBlock(uint32_t size, std::unique_ptr<BitstreamBuffer>* buffer) {
     ALOGV("Fetching linear block (size: %u)", size);
     std::shared_ptr<C2LinearBlock> block;
     c2_status_t status = mOutputBlockPool->fetchLinearBlock(
@@ -866,7 +786,7 @@ void V4L2EncodeComponent::fetchOutputBlock(uint32_t size,
     *buffer = std::make_unique<BitstreamBuffer>(std::move(block), 0, size);
 }
 
-void V4L2EncodeComponent::onInputBufferDone(uint64_t index) {
+void EncodeComponent::onInputBufferDone(uint64_t index) {
     ALOGV("%s(): Input buffer done (index: %" PRIu64 ")", __func__, index);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
     ALOG_ASSERT(mEncoder);
@@ -908,8 +828,8 @@ void V4L2EncodeComponent::onInputBufferDone(uint64_t index) {
     }
 }
 
-void V4L2EncodeComponent::onOutputBufferDone(size_t dataSize, int64_t timestamp, bool keyFrame,
-                                             std::unique_ptr<BitstreamBuffer> buffer) {
+void EncodeComponent::onOutputBufferDone(size_t dataSize, int64_t timestamp, bool keyFrame,
+                                         std::unique_ptr<BitstreamBuffer> buffer) {
     ALOGV("%s(): output buffer done (timestamp: %" PRId64 ", size: %zu, keyframe: %d)", __func__,
           timestamp, dataSize, keyFrame);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
@@ -971,7 +891,7 @@ void V4L2EncodeComponent::onOutputBufferDone(size_t dataSize, int64_t timestamp,
     }
 }
 
-C2Work* V4L2EncodeComponent::getWorkByIndex(uint64_t index) {
+C2Work* EncodeComponent::getWorkByIndex(uint64_t index) {
     ALOGV("%s(): getting work item (index: %" PRIu64 ")", __func__, index);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -986,7 +906,7 @@ C2Work* V4L2EncodeComponent::getWorkByIndex(uint64_t index) {
     return it->get();
 }
 
-C2Work* V4L2EncodeComponent::getWorkByTimestamp(int64_t timestamp) {
+C2Work* EncodeComponent::getWorkByTimestamp(int64_t timestamp) {
     ALOGV("%s(): getting work item (timestamp: %" PRId64 ")", __func__, timestamp);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
     ALOG_ASSERT(timestamp >= 0);
@@ -1006,7 +926,7 @@ C2Work* V4L2EncodeComponent::getWorkByTimestamp(int64_t timestamp) {
     return it->get();
 }
 
-bool V4L2EncodeComponent::isWorkDone(const C2Work& work) const {
+bool EncodeComponent::isWorkDone(const C2Work& work) const {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -1033,7 +953,7 @@ bool V4L2EncodeComponent::isWorkDone(const C2Work& work) const {
     return true;
 }
 
-void V4L2EncodeComponent::reportWork(std::unique_ptr<C2Work> work) {
+void EncodeComponent::reportWork(std::unique_ptr<C2Work> work) {
     ALOG_ASSERT(work);
     ALOGV("%s(): Reporting work item as finished (index: %llu, timestamp: %llu)", __func__,
           work->input.ordinal.frameIndex.peekull(), work->input.ordinal.timestamp.peekull());
@@ -1047,7 +967,7 @@ void V4L2EncodeComponent::reportWork(std::unique_ptr<C2Work> work) {
     mListener->onWorkDone_nb(weak_from_this(), std::move(finishedWorkList));
 }
 
-bool V4L2EncodeComponent::getBlockPool() {
+bool EncodeComponent::getBlockPool() {
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
     auto sharedThis = weak_from_this().lock();
@@ -1068,7 +988,7 @@ bool V4L2EncodeComponent::getBlockPool() {
     return true;
 }
 
-void V4L2EncodeComponent::reportError(c2_status_t error) {
+void EncodeComponent::reportError(c2_status_t error) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
 
@@ -1080,7 +1000,7 @@ void V4L2EncodeComponent::reportError(c2_status_t error) {
     }
 }
 
-void V4L2EncodeComponent::setComponentState(ComponentState state) {
+void EncodeComponent::setComponentState(ComponentState state) {
     // Check whether the state change is valid.
     switch (state) {
     case ComponentState::UNLOADED:
@@ -1103,7 +1023,7 @@ void V4L2EncodeComponent::setComponentState(ComponentState state) {
     mComponentState = state;
 }
 
-const char* V4L2EncodeComponent::componentStateToString(V4L2EncodeComponent::ComponentState state) {
+const char* EncodeComponent::componentStateToString(EncodeComponent::ComponentState state) {
     switch (state) {
     case ComponentState::UNLOADED:
         return "UNLOADED";
diff --git a/components/V4L2EncodeInterface.cpp b/components/EncodeInterface.cpp
similarity index 81%
rename from components/V4L2EncodeInterface.cpp
rename to components/EncodeInterface.cpp
index 03d8c370981faeaae849adffa684d2bdc4eff414..9d7d81f7433b3a9f8a4ec007a209efd50a75007b 100644
--- a/components/V4L2EncodeInterface.cpp
+++ b/components/EncodeInterface.cpp
@@ -1,11 +1,11 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "V4L2EncodeInterface"
 
-#include <v4l2_codec2/components/V4L2EncodeInterface.h>
+#include <v4l2_codec2/components/EncodeInterface.h>
 
 #include <inttypes.h>
 #include <algorithm>
@@ -16,8 +16,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <utils/Log.h>
 
-#include <v4l2_codec2/common/V4L2ComponentCommon.h>
-#include <v4l2_codec2/common/V4L2Device.h>
+#include <v4l2_codec2/common/Common.h>
 #include <v4l2_codec2/common/VideoTypes.h>
 
 using android::hardware::graphics::common::V1_0::BufferUsage;
@@ -41,41 +40,17 @@ constexpr uint32_t kDefaultBitrate = 64000;
 // The maximal output bitrate in bits per second. It's the max bitrate of AVC Level4.1.
 // TODO: increase this in the future for supporting higher level/resolution encoding.
 constexpr uint32_t kMaxBitrate = 50000000;
-
-std::optional<VideoCodec> getCodecFromComponentName(const std::string& name) {
-    if (name == V4L2ComponentName::kH264Encoder) return VideoCodec::H264;
-    if (name == V4L2ComponentName::kVP8Encoder) return VideoCodec::VP8;
-    if (name == V4L2ComponentName::kVP9Encoder) return VideoCodec::VP9;
-
-    ALOGE("Unknown name: %s", name.c_str());
-    return std::nullopt;
-}
-
-// Check whether the specified profile is a valid profile for the specified codec.
-bool IsValidProfileForCodec(VideoCodec codec, C2Config::profile_t profile) {
-    switch (codec) {
-    case VideoCodec::H264:
-        return ((profile >= C2Config::PROFILE_AVC_BASELINE) &&
-                (profile <= C2Config::PROFILE_AVC_ENHANCED_MULTIVIEW_DEPTH_HIGH));
-    case VideoCodec::VP8:
-        return ((profile >= C2Config::PROFILE_VP8_0) && (profile <= C2Config::PROFILE_VP8_3));
-    case VideoCodec::VP9:
-        return ((profile >= C2Config::PROFILE_VP9_0) && (profile <= C2Config::PROFILE_VP9_3));
-    default:
-        return false;
-    }
-}
-
 }  // namespace
 
-// static
-C2R V4L2EncodeInterface::H264ProfileLevelSetter(
-        bool /*mayBlock*/, C2P<C2StreamProfileLevelInfo::output>& info,
-        const C2P<C2StreamPictureSizeInfo::input>& videoSize,
-        const C2P<C2StreamFrameRateInfo::output>& frameRate,
-        const C2P<C2StreamBitrateInfo::output>& bitrate) {
-    static C2Config::level_t lowestConfigLevel = C2Config::LEVEL_UNUSED;
+// static
+C2Config::level_t EncodeInterface::lowestConfigLevel = C2Config::LEVEL_UNUSED;
 
+// static
+C2R EncodeInterface::H264ProfileLevelSetter(bool /*mayBlock*/,
+                                            C2P<C2StreamProfileLevelInfo::output>& info,
+                                            const C2P<C2StreamPictureSizeInfo::input>& videoSize,
+                                            const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                            const C2P<C2StreamBitrateInfo::output>& bitrate) {
     // Adopt default minimal profile instead if the requested profile is not supported, or lower
     // than the default minimal one.
     constexpr C2Config::profile_t minProfile = C2Config::PROFILE_AVC_BASELINE;
@@ -185,11 +160,11 @@ C2R V4L2EncodeInterface::H264ProfileLevelSetter(
     return C2R::Ok();
 }
 
-C2R V4L2EncodeInterface::VP9ProfileLevelSetter(
-        bool /*mayBlock*/, C2P<C2StreamProfileLevelInfo::output>& info,
-        const C2P<C2StreamPictureSizeInfo::input>& /*videoSize*/,
-        const C2P<C2StreamFrameRateInfo::output>& /*frameRate*/,
-        const C2P<C2StreamBitrateInfo::output>& /*bitrate*/) {
+C2R EncodeInterface::VP9ProfileLevelSetter(bool /*mayBlock*/,
+                                           C2P<C2StreamProfileLevelInfo::output>& info,
+                                           const C2P<C2StreamPictureSizeInfo::input>& /*videoSize*/,
+                                           const C2P<C2StreamFrameRateInfo::output>& /*frameRate*/,
+                                           const C2P<C2StreamBitrateInfo::output>& /*bitrate*/) {
     // Adopt default minimal profile instead if the requested profile is not supported, or lower
     // than the default minimal one.
     constexpr C2Config::profile_t defaultMinProfile = C2Config::PROFILE_VP9_0;
@@ -209,7 +184,7 @@ C2R V4L2EncodeInterface::VP9ProfileLevelSetter(
 }
 
 // static
-C2R V4L2EncodeInterface::SizeSetter(bool mayBlock, C2P<C2StreamPictureSizeInfo::input>& videoSize) {
+C2R EncodeInterface::SizeSetter(bool mayBlock, C2P<C2StreamPictureSizeInfo::input>& videoSize) {
     (void)mayBlock;
     // TODO: maybe apply block limit?
     return videoSize.F(videoSize.v.width)
@@ -218,8 +193,8 @@ C2R V4L2EncodeInterface::SizeSetter(bool mayBlock, C2P<C2StreamPictureSizeInfo::
 }
 
 // static
-C2R V4L2EncodeInterface::IntraRefreshPeriodSetter(bool mayBlock,
-                                                  C2P<C2StreamIntraRefreshTuning::output>& period) {
+C2R EncodeInterface::IntraRefreshPeriodSetter(bool mayBlock,
+                                              C2P<C2StreamIntraRefreshTuning::output>& period) {
     (void)mayBlock;
     if (period.v.period < 1) {
         period.set().mode = C2Config::INTRA_REFRESH_DISABLED;
@@ -231,40 +206,23 @@ C2R V4L2EncodeInterface::IntraRefreshPeriodSetter(bool mayBlock,
     return C2R::Ok();
 }
 
-V4L2EncodeInterface::V4L2EncodeInterface(const C2String& name,
-                                         std::shared_ptr<C2ReflectorHelper> helper)
+EncodeInterface::EncodeInterface(const C2String& name, std::shared_ptr<C2ReflectorHelper> helper,
+                                 const SupportedCapabilities& caps)
       : C2InterfaceHelper(std::move(helper)) {
     ALOGV("%s(%s)", __func__, name.c_str());
 
     setDerivedInstance(this);
 
-    Initialize(name);
+    Initialize(name, caps);
 }
 
-void V4L2EncodeInterface::Initialize(const C2String& name) {
-    scoped_refptr<V4L2Device> device = V4L2Device::create();
-    if (!device) {
-        ALOGE("Failed to create V4L2 device");
-        mInitStatus = C2_CORRUPTED;
-        return;
-    }
-
-    auto codec = getCodecFromComponentName(name);
-    if (!codec) {
-        ALOGE("Invalid component name");
-        mInitStatus = C2_BAD_VALUE;
-        return;
-    }
-
-    V4L2Device::SupportedEncodeProfiles supported_profiles = device->getSupportedEncodeProfiles();
-
-    // Compile the list of supported profiles.
+void EncodeInterface::Initialize(const C2String& name, const SupportedCapabilities& caps) {
     // Note: unsigned int is used here, since std::vector<C2Config::profile_t> cannot convert to
     // std::vector<unsigned int> required by the c2 framework below.
     std::vector<unsigned int> profiles;
     ui::Size maxSize;
-    for (const auto& supportedProfile : supported_profiles) {
-        if (!IsValidProfileForCodec(codec.value(), supportedProfile.profile)) {
+    for (const auto& supportedProfile : caps.supportedProfiles) {
+        if (!isValidProfileForCodec(caps.codec, supportedProfile.profile)) {
             continue;  // Ignore unrecognizable or unsupported profiles.
         }
         ALOGV("Queried c2_profile = 0x%x : max_size = %d x %d", supportedProfile.profile,
@@ -320,7 +278,7 @@ void V4L2EncodeInterface::Initialize(const C2String& name) {
                     .build());
 
     std::string outputMime;
-    if (getCodecFromComponentName(name) == VideoCodec::H264) {
+    if (caps.codec == VideoCodec::H264) {
         outputMime = MEDIA_MIMETYPE_VIDEO_AVC;
         C2Config::profile_t minProfile = static_cast<C2Config::profile_t>(
                 *std::min_element(profiles.begin(), profiles.end()));
@@ -342,14 +300,14 @@ void V4L2EncodeInterface::Initialize(const C2String& name) {
                                                  C2Config::LEVEL_AVC_5, C2Config::LEVEL_AVC_5_1})})
                         .withSetter(H264ProfileLevelSetter, mInputVisibleSize, mFrameRate, mBitrate)
                         .build());
-    } else if (getCodecFromComponentName(name) == VideoCodec::VP8) {
+    } else if (caps.codec == VideoCodec::VP8) {
         outputMime = MEDIA_MIMETYPE_VIDEO_VP8;
         // VP8 doesn't have conventional profiles, we'll use profile0 if the VP8 codec is requested.
         addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                              .withConstValue(new C2StreamProfileLevelInfo::output(
                                      0u, C2Config::PROFILE_VP8_0, C2Config::LEVEL_UNUSED))
                              .build());
-    } else if (getCodecFromComponentName(name) == VideoCodec::VP9) {
+    } else if (caps.codec == VideoCodec::VP9) {
         outputMime = MEDIA_MIMETYPE_VIDEO_VP9;
         C2Config::profile_t minProfile = static_cast<C2Config::profile_t>(
                 *std::min_element(profiles.begin(), profiles.end()));
@@ -382,17 +340,10 @@ void V4L2EncodeInterface::Initialize(const C2String& name) {
                     .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
                     .build());
 
-    // TODO(b/167640667) Add VIDEO_ENCODER flag once input convertor is not enabled by default.
-    // When using the format convertor (which is currently always enabled) it's not useful to add
-    // the VIDEO_ENCODER buffer flag for input buffers here. Currently zero-copy is not supported
-    // yet, so when using this flag an additional buffer will be allocated on host side and a copy
-    // will be performed between the guest and host buffer to keep them in sync. This is wasteful as
-    // the buffer is only used on guest side by the format convertor which converts and copies the
-    // buffer into another buffer.
-    //addParameter(DefineParam(mInputMemoryUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
-    //                     .withConstValue(new C2StreamUsageTuning::input(
-    //                             0u, static_cast<uint64_t>(BufferUsage::VIDEO_ENCODER)))
-    //                     .build());
+    addParameter(DefineParam(mInputMemoryUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+                         .withConstValue(new C2StreamUsageTuning::input(
+                                 0u, static_cast<uint64_t>(BufferUsage::VIDEO_ENCODER)))
+                         .build());
 
     addParameter(
             DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
@@ -458,7 +409,7 @@ void V4L2EncodeInterface::Initialize(const C2String& name) {
     mInitStatus = C2_OK;
 }
 
-uint32_t V4L2EncodeInterface::getKeyFramePeriod() const {
+uint32_t EncodeInterface::getKeyFramePeriod() const {
     if (mKeyFramePeriodUs->value < 0 || mKeyFramePeriodUs->value == INT64_MAX) {
         return 0;
     }
diff --git a/components/V4L2ComponentStore.cpp b/components/V4L2ComponentStore.cpp
deleted file mode 100644
index feb579969bbf21ca0fe4e43fd475a679b66c7333..0000000000000000000000000000000000000000
--- a/components/V4L2ComponentStore.cpp
+++ /dev/null
@@ -1,208 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "V4L2ComponentStore"
-
-#include <v4l2_codec2/components/V4L2ComponentStore.h>
-
-#include <stdint.h>
-
-#include <memory>
-#include <mutex>
-
-#include <C2.h>
-#include <C2Config.h>
-#include <log/log.h>
-#include <media/stagefright/foundation/MediaDefs.h>
-
-#include <v4l2_codec2/common/V4L2ComponentCommon.h>
-#include <v4l2_codec2/components/V4L2ComponentFactory.h>
-
-namespace android {
-namespace {
-const uint32_t kComponentRank = 0x80;
-
-std::string getMediaTypeFromComponentName(const std::string& name) {
-    if (name == V4L2ComponentName::kH264Decoder || name == V4L2ComponentName::kH264SecureDecoder ||
-        name == V4L2ComponentName::kH264Encoder) {
-        return MEDIA_MIMETYPE_VIDEO_AVC;
-    }
-    if (name == V4L2ComponentName::kVP8Decoder || name == V4L2ComponentName::kVP8SecureDecoder ||
-        name == V4L2ComponentName::kVP8Encoder) {
-        return MEDIA_MIMETYPE_VIDEO_VP8;
-    }
-    if (name == V4L2ComponentName::kVP9Decoder || name == V4L2ComponentName::kVP9SecureDecoder ||
-        name == V4L2ComponentName::kVP9Encoder) {
-        return MEDIA_MIMETYPE_VIDEO_VP9;
-    }
-    if (name == V4L2ComponentName::kHEVCDecoder || name == V4L2ComponentName::kHEVCSecureDecoder) {
-        return MEDIA_MIMETYPE_VIDEO_HEVC;
-    }
-    return "";
-}
-
-}  // namespace
-
-// static
-std::shared_ptr<C2ComponentStore> V4L2ComponentStore::Create() {
-    ALOGV("%s()", __func__);
-
-    static std::mutex mutex;
-    static std::weak_ptr<C2ComponentStore> platformStore;
-
-    std::lock_guard<std::mutex> lock(mutex);
-    std::shared_ptr<C2ComponentStore> store = platformStore.lock();
-    if (store != nullptr) return store;
-
-    store = std::shared_ptr<C2ComponentStore>(new V4L2ComponentStore());
-    platformStore = store;
-    return store;
-}
-
-V4L2ComponentStore::V4L2ComponentStore() : mReflector(std::make_shared<C2ReflectorHelper>()) {
-    ALOGV("%s()", __func__);
-}
-
-V4L2ComponentStore::~V4L2ComponentStore() {
-    ALOGV("%s()", __func__);
-
-    std::lock_guard<std::mutex> lock(mCachedFactoriesLock);
-    mCachedFactories.clear();
-}
-
-C2String V4L2ComponentStore::getName() const {
-    return "android.componentStore.v4l2";
-}
-
-c2_status_t V4L2ComponentStore::createComponent(C2String name,
-                                                std::shared_ptr<C2Component>* const component) {
-    ALOGV("%s(%s)", __func__, name.c_str());
-
-    if (!V4L2ComponentName::isValid(name.c_str())) {
-        ALOGI("%s(): Invalid component name: %s", __func__, name.c_str());
-        return C2_NOT_FOUND;
-    }
-
-    auto factory = GetFactory(name);
-    if (factory == nullptr) return C2_CORRUPTED;
-
-    component->reset();
-    return factory->createComponent(0, component);
-}
-
-c2_status_t V4L2ComponentStore::createInterface(
-        C2String name, std::shared_ptr<C2ComponentInterface>* const interface) {
-    ALOGV("%s(%s)", __func__, name.c_str());
-
-    if (!V4L2ComponentName::isValid(name.c_str())) {
-        ALOGI("%s(): Invalid component name: %s", __func__, name.c_str());
-        return C2_NOT_FOUND;
-    }
-
-    auto factory = GetFactory(name);
-    if (factory == nullptr) return C2_CORRUPTED;
-
-    interface->reset();
-    return factory->createInterface(0, interface);
-}
-
-std::vector<std::shared_ptr<const C2Component::Traits>> V4L2ComponentStore::listComponents() {
-    ALOGV("%s()", __func__);
-
-    std::vector<std::shared_ptr<const C2Component::Traits>> ret;
-    ret.push_back(GetTraits(V4L2ComponentName::kH264Encoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kH264Decoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kH264SecureDecoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kVP8Encoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kVP8Decoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kVP8SecureDecoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kVP9Encoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kVP9Decoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kVP9SecureDecoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kHEVCDecoder));
-    ret.push_back(GetTraits(V4L2ComponentName::kHEVCSecureDecoder));
-    return ret;
-}
-
-std::shared_ptr<C2ParamReflector> V4L2ComponentStore::getParamReflector() const {
-    return mReflector;
-}
-
-c2_status_t V4L2ComponentStore::copyBuffer(std::shared_ptr<C2GraphicBuffer> /* src */,
-                                           std::shared_ptr<C2GraphicBuffer> /* dst */) {
-    return C2_OMITTED;
-}
-
-c2_status_t V4L2ComponentStore::querySupportedParams_nb(
-        std::vector<std::shared_ptr<C2ParamDescriptor>>* const /* params */) const {
-    return C2_OK;
-}
-
-c2_status_t V4L2ComponentStore::query_sm(
-        const std::vector<C2Param*>& stackParams,
-        const std::vector<C2Param::Index>& heapParamIndices,
-        std::vector<std::unique_ptr<C2Param>>* const /* heapParams */) const {
-    // There are no supported config params.
-    return stackParams.empty() && heapParamIndices.empty() ? C2_OK : C2_BAD_INDEX;
-}
-
-c2_status_t V4L2ComponentStore::config_sm(
-        const std::vector<C2Param*>& params,
-        std::vector<std::unique_ptr<C2SettingResult>>* const /* failures */) {
-    // There are no supported config params.
-    return params.empty() ? C2_OK : C2_BAD_INDEX;
-}
-
-c2_status_t V4L2ComponentStore::querySupportedValues_sm(
-        std::vector<C2FieldSupportedValuesQuery>& fields) const {
-    // There are no supported config params.
-    return fields.empty() ? C2_OK : C2_BAD_INDEX;
-}
-
-::C2ComponentFactory* V4L2ComponentStore::GetFactory(const C2String& name) {
-    ALOGV("%s(%s)", __func__, name.c_str());
-    ALOG_ASSERT(V4L2ComponentName::isValid(name.c_str()));
-
-    std::lock_guard<std::mutex> lock(mCachedFactoriesLock);
-    const auto it = mCachedFactories.find(name);
-    if (it != mCachedFactories.end()) return it->second.get();
-
-    std::unique_ptr<::C2ComponentFactory> factory = V4L2ComponentFactory::create(
-            name, std::static_pointer_cast<C2ReflectorHelper>(getParamReflector()));
-    if (factory == nullptr) {
-        ALOGE("Failed to create factory for %s", name.c_str());
-        return nullptr;
-    }
-
-    auto ret = factory.get();
-    mCachedFactories.emplace(name, std::move(factory));
-    return ret;
-}
-
-std::shared_ptr<const C2Component::Traits> V4L2ComponentStore::GetTraits(const C2String& name) {
-    ALOGV("%s(%s)", __func__, name.c_str());
-
-    if (!V4L2ComponentName::isValid(name.c_str())) {
-        ALOGE("Invalid component name: %s", name.c_str());
-        return nullptr;
-    }
-
-    std::lock_guard<std::mutex> lock(mCachedTraitsLock);
-    auto it = mCachedTraits.find(name);
-    if (it != mCachedTraits.end()) return it->second;
-
-    auto traits = std::make_shared<C2Component::Traits>();
-    traits->name = name;
-    traits->domain = C2Component::DOMAIN_VIDEO;
-    traits->rank = kComponentRank;
-    traits->mediaType = getMediaTypeFromComponentName(name);
-    traits->kind = V4L2ComponentName::isEncoder(name.c_str()) ? C2Component::KIND_ENCODER
-                                                              : C2Component::KIND_DECODER;
-
-    mCachedTraits.emplace(name, traits);
-    return traits;
-}
-
-}  // namespace android
diff --git a/components/VideoEncoder.cpp b/components/VideoEncoder.cpp
index e3e19c229ea61ca6da6dc88d6ce10ef16e9685c8..8f1044b80e2d3b0fc0110900aa1cdb88e6824689 100644
--- a/components/VideoEncoder.cpp
+++ b/components/VideoEncoder.cpp
@@ -6,11 +6,12 @@
 
 namespace android {
 
-VideoEncoder::InputFrame::InputFrame(std::vector<int>&& fds, std::vector<VideoFramePlane>&& planes,
+VideoEncoder::InputFrame::InputFrame(std::vector<int>&& fds,
+                                     const std::vector<VideoFramePlane>& planes,
                                      VideoPixelFormat pixelFormat, uint64_t index,
                                      int64_t timestamp)
       : mFds(std::move(fds)),
-        mPlanes(std::move(planes)),
+        mPlanes(planes),
         mPixelFormat(pixelFormat),
         mIndex(index),
         mTimestamp(timestamp) {}
diff --git a/components/VideoFramePool.cpp b/components/VideoFramePool.cpp
index 4bf45f3fd02fc9e96e87286e015697035f17d9f1..9927acf6f30da1a8e509b9d8d1534f65fe9e1e66 100644
--- a/components/VideoFramePool.cpp
+++ b/components/VideoFramePool.cpp
@@ -179,7 +179,7 @@ void VideoFramePool::getVideoFrameTask() {
     ALOG_ASSERT(mFetchTaskRunner->RunsTasksInCurrentSequence());
 
     // Variables used to exponential backoff retry when buffer fetching times out.
-    constexpr size_t kFetchRetryDelayInit = 64;    // Initial delay: 64us
+    constexpr size_t kFetchRetryDelayInit = 256;   // Initial delay: 256us
     constexpr size_t kFetchRetryDelayMax = 16384;  // Max delay: 16ms (1 frame at 60fps)
     constexpr size_t kFenceWaitTimeoutNs = 16000000;  // 16ms (1 frame at 60fps)
     static size_t sNumRetries = 0;
@@ -190,7 +190,18 @@ void VideoFramePool::getVideoFrameTask() {
     c2_status_t err = mBlockPool->fetchGraphicBlock(mSize.width, mSize.height,
                                                     static_cast<uint32_t>(mPixelFormat),
                                                     mMemoryUsage, &block, &fence);
-    if (err == C2_BLOCKING) {
+    // C2_BLOCKING can be returned either based on the state of the block pool itself
+    // or the state of the underlying buffer queue. If the cause is the underlying
+    // buffer queue, then the block pool returns a null fence. Since a null fence is
+    // immediately ready, we need to delay instead of trying to wait on the fence, to
+    // avoid spinning.
+    //
+    // Unfortunately, a null fence is considered a valid fence, so the best we can do
+    // to detect a null fence is to assume that any fence that is immediately ready
+    // is the null fence. A false positive by racing with a real fence can result in
+    // an unnecessary delay, but the only alternative is to ignore fences altogether
+    // and always delay.
+    if (err == C2_BLOCKING && !fence.ready()) {
         err = fence.wait(kFenceWaitTimeoutNs);
         if (err == C2_OK) {
             ALOGV("%s(): fence wait succeded, retrying now", __func__);
@@ -231,7 +242,7 @@ void VideoFramePool::getVideoFrameTask() {
                 FROM_HERE, ::base::BindOnce(&VideoFramePool::getVideoFrameTask, mFetchWeakThis),
                 ::base::TimeDelta::FromMicroseconds(sDelay));
 
-        sDelay = std::min(sDelay * 2, kFetchRetryDelayMax);  // Exponential backoff
+        sDelay = std::min(sDelay * 4, kFetchRetryDelayMax);  // Exponential backoff
         sNumRetries++;
         return;
     }
diff --git a/components/include/v4l2_codec2/components/V4L2ComponentStore.h b/components/include/v4l2_codec2/components/ComponentStore.h
similarity index 61%
rename from components/include/v4l2_codec2/components/V4L2ComponentStore.h
rename to components/include/v4l2_codec2/components/ComponentStore.h
index bfec40733af87fb445d510473999d1342dc99da0..a759d4d00a5d9b391a77ab4a4f8a7f17356febd0 100644
--- a/components/include/v4l2_codec2/components/V4L2ComponentStore.h
+++ b/components/include/v4l2_codec2/components/ComponentStore.h
@@ -1,9 +1,9 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_COMPONENT_STORE_H
-#define ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_COMPONENT_STORE_H
+#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_COMPONENT_STORE_MIXIN_H
+#define ANDROID_V4L2_CODEC2_COMPONENTS_COMPONENT_STORE_MIXIN_H
 
 #include <map>
 #include <mutex>
@@ -15,10 +15,15 @@
 
 namespace android {
 
-class V4L2ComponentStore : public C2ComponentStore {
+enum class VideoCodec;
+
+class ComponentStore : public C2ComponentStore {
 public:
-    static std::shared_ptr<C2ComponentStore> Create();
-    ~V4L2ComponentStore();
+    using GetFactory = std::function<std::unique_ptr<C2ComponentFactory>(
+            const std::string& /* name */, std::shared_ptr<C2ReflectorHelper>)>;
+    class Builder;
+
+    virtual ~ComponentStore();
 
     // C2ComponentStore implementation.
     C2String getName() const override;
@@ -41,10 +46,21 @@ public:
             std::vector<C2FieldSupportedValuesQuery>& fields) const override;
 
 private:
-    V4L2ComponentStore();
+    struct Declaration {
+        VideoCodec codec;
+        C2Component::kind_t kind;
+        GetFactory factory;
+    };
+
+    ComponentStore(C2String storeName);
+
+    ::C2ComponentFactory* getFactory(const C2String& name);
 
-    ::C2ComponentFactory* GetFactory(const C2String& name);
-    std::shared_ptr<const C2Component::Traits> GetTraits(const C2String& name);
+    std::shared_ptr<const C2Component::Traits> getTraits(const C2String& name);
+
+    C2String mStoreName;
+
+    std::map<std::string, Declaration> mDeclarations;
 
     std::shared_ptr<C2ReflectorHelper> mReflector;
 
@@ -54,8 +70,25 @@ private:
     std::mutex mCachedTraitsLock;
     std::map<C2String, std::shared_ptr<const C2Component::Traits>> mCachedTraits
             GUARDED_BY(mCachedTraitsLock);
+
+    friend class Builder;
+};
+
+class ComponentStore::Builder final {
+public:
+    Builder(C2String storeName);
+    ~Builder() = default;
+
+    Builder& decoder(std::string name, VideoCodec codec, GetFactory factory);
+
+    Builder& encoder(std::string name, VideoCodec codec, GetFactory factory);
+
+    std::shared_ptr<ComponentStore> build() &&;
+
+private:
+    std::unique_ptr<ComponentStore> mStore;
 };
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_COMPONENT_STORE_H
+#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_COMPONENT_STORE_MIXIN_H
diff --git a/components/include/v4l2_codec2/components/V4L2DecodeComponent.h b/components/include/v4l2_codec2/components/DecodeComponent.h
similarity index 79%
rename from components/include/v4l2_codec2/components/V4L2DecodeComponent.h
rename to components/include/v4l2_codec2/components/DecodeComponent.h
index 962f7d6049b4c9cdb3303da0a4a374c14c363e94..27905c7c9ee76ff017c35f77d9258ce6a1bf0ca5 100644
--- a/components/include/v4l2_codec2/components/V4L2DecodeComponent.h
+++ b/components/include/v4l2_codec2/components/DecodeComponent.h
@@ -1,9 +1,9 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODE_COMPONENT_H
-#define ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODE_COMPONENT_H
+#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_DECODE_COMPONENT_H
+#define ANDROID_V4L2_CODEC2_COMPONENTS_DECODE_COMPONENT_H
 
 #include <atomic>
 #include <memory>
@@ -17,22 +17,17 @@
 #include <base/synchronization/waitable_event.h>
 #include <base/threading/thread.h>
 
-#include <v4l2_codec2/components/V4L2DecodeInterface.h>
+#include <v4l2_codec2/components/DecodeInterface.h>
 #include <v4l2_codec2/components/VideoDecoder.h>
 #include <v4l2_codec2/components/VideoFramePool.h>
 
 namespace android {
 
-class V4L2DecodeComponent : public C2Component,
-                            public std::enable_shared_from_this<V4L2DecodeComponent> {
+class DecodeComponent : public C2Component, public std::enable_shared_from_this<DecodeComponent> {
 public:
-    static std::shared_ptr<C2Component> create(const std::string& name, c2_node_id_t id,
-                                               const std::shared_ptr<C2ReflectorHelper>& helper,
-                                               C2ComponentFactory::ComponentDeleter deleter);
-    V4L2DecodeComponent(const std::string& name, c2_node_id_t id,
-                        const std::shared_ptr<C2ReflectorHelper>& helper,
-                        const std::shared_ptr<V4L2DecodeInterface>& intfImpl);
-    ~V4L2DecodeComponent() override;
+    DecodeComponent(uint32_t debugStreamId, const std::string& name, c2_node_id_t id,
+                    const std::shared_ptr<DecodeInterface>& intfImpl);
+    virtual ~DecodeComponent() override;
 
     // Implementation of C2Component.
     c2_status_t start() override;
@@ -48,7 +43,7 @@ public:
     c2_status_t drain_nb(drain_mode_t mode) override;
     std::shared_ptr<C2ComponentInterface> intf() override;
 
-private:
+protected:
     // The C2Component state machine.
     enum class ComponentState {
         STOPPED,
@@ -59,7 +54,7 @@ private:
     static const char* ComponentStateToString(ComponentState state);
 
     // Handle C2Component's public methods on |mDecoderTaskRunner|.
-    void startTask(c2_status_t* status, ::base::WaitableEvent* done);
+    virtual void startTask(c2_status_t* status, ::base::WaitableEvent* done) = 0;
     void stopTask();
     void releaseTask();
     void queueTask(std::unique_ptr<C2Work> work);
@@ -69,6 +64,11 @@ private:
 
     // Try to process pending works at |mPendingWorks|. Paused when |mIsDraining| is set.
     void pumpPendingWorks();
+
+    void processCSDWork(const int32_t bitstreamId, const C2Work* work);
+    void processWork(const int32_t bitstreamId, const C2Work* work);
+    void processWorkBuffer(const int32_t bitstreamId, const C2ConstLinearBlock& linearBlock);
+
     // Get the buffer pool.
     std::unique_ptr<VideoFramePool> getVideoFramePool(const ui::Size& size,
                                                       HalPixelFormat pixelFormat,
@@ -92,10 +92,11 @@ private:
     // Report error when any error occurs.
     void reportError(c2_status_t error);
 
-    static std::atomic<int32_t> sConcurrentInstances;
+    // Identifier used for debugging purposes.
+    uint32_t mDebugStreamId;
 
     // The pointer of component interface implementation.
-    std::shared_ptr<V4L2DecodeInterface> mIntfImpl;
+    std::shared_ptr<DecodeInterface> mIntfImpl;
     // The pointer of component interface.
     const std::shared_ptr<C2ComponentInterface> mIntf;
     // The pointer of component listener.
@@ -137,13 +138,13 @@ private:
 
     // The device task runner and its sequence checker. We should interact with
     // |mDevice| on this.
-    ::base::Thread mDecoderThread{"V4L2DecodeComponentDecoderThread"};
+    ::base::Thread mDecoderThread{"DecodeComponentDecoderThread"};
     scoped_refptr<::base::SequencedTaskRunner> mDecoderTaskRunner;
 
-    ::base::WeakPtrFactory<V4L2DecodeComponent> mWeakThisFactory{this};
-    ::base::WeakPtr<V4L2DecodeComponent> mWeakThis;
+    ::base::WeakPtrFactory<DecodeComponent> mWeakThisFactory{this};
+    ::base::WeakPtr<DecodeComponent> mWeakThis;
 };
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODE_COMPONENT_H
+#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_DECODE_COMPONENT_H
diff --git a/components/include/v4l2_codec2/components/V4L2DecodeInterface.h b/components/include/v4l2_codec2/components/DecodeInterface.h
similarity index 83%
rename from components/include/v4l2_codec2/components/V4L2DecodeInterface.h
rename to components/include/v4l2_codec2/components/DecodeInterface.h
index f2ab898818c91632c62925c1b35edf563291a949..7e513c5ae320f28670ea8ae38947814107f7146d 100644
--- a/components/include/v4l2_codec2/components/V4L2DecodeInterface.h
+++ b/components/include/v4l2_codec2/components/DecodeInterface.h
@@ -1,9 +1,9 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODE_INTERFACE_H
-#define ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODE_INTERFACE_H
+#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_DECODE_INTERFACE_H
+#define ANDROID_V4L2_CODEC2_COMPONENTS_DECODE_INTERFACE_H
 
 #include <memory>
 #include <string>
@@ -12,16 +12,18 @@
 #include <ui/Size.h>
 #include <util/C2InterfaceHelper.h>
 
+#include <v4l2_codec2/common/Common.h>
 #include <v4l2_codec2/common/VideoTypes.h>
 
 namespace android {
 
-class V4L2DecodeInterface : public C2InterfaceHelper {
+class DecodeInterface : public C2InterfaceHelper {
 public:
-    V4L2DecodeInterface(const std::string& name, const std::shared_ptr<C2ReflectorHelper>& helper);
-    V4L2DecodeInterface(const V4L2DecodeInterface&) = delete;
-    V4L2DecodeInterface& operator=(const V4L2DecodeInterface&) = delete;
-    ~V4L2DecodeInterface() = default;
+    DecodeInterface(const std::string& name, const std::shared_ptr<C2ReflectorHelper>& helper,
+                    const SupportedCapabilities& caps);
+    DecodeInterface(const DecodeInterface&) = delete;
+    DecodeInterface& operator=(const DecodeInterface&) = delete;
+    ~DecodeInterface() = default;
 
     // interfaces for the client component.
     c2_status_t status() const { return mInitStatus; }
@@ -38,6 +40,7 @@ private:
     // Configurable parameter setters.
     static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& info);
     static C2R SizeSetter(bool mayBlock, C2P<C2StreamPictureSizeInfo::output>& videoSize);
+    static C2R InputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& inputSize);
     static C2R MaxInputBufferSizeCalculator(bool mayBlock,
                                             C2P<C2StreamMaxBufferSizeInfo::input>& me,
                                             const C2P<C2StreamPictureSizeInfo::output>& size);
@@ -66,6 +69,9 @@ private:
     // buffer can be released by the component; only used for H264 because H264 may reorder the
     // output frames.
     std::shared_ptr<C2PortDelayTuning::output> mOutputDelay;
+    // The number of extra frames processed at one time by the component. Allows more input
+    // buffers to be simultaneously enqueued.
+    std::shared_ptr<C2PipelineDelayTuning> mPipelineDelay;
     // The input codec profile and level. For now configuring this parameter is useless since
     // the component always uses fixed codec profile to initialize accelerator. It is only used
     // for the client to query supported profile and level values.
@@ -100,4 +106,4 @@ private:
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODE_INTERFACE_H
+#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_DECODE_INTERFACE_H
diff --git a/components/include/v4l2_codec2/components/V4L2EncodeComponent.h b/components/include/v4l2_codec2/components/EncodeComponent.h
similarity index 81%
rename from components/include/v4l2_codec2/components/V4L2EncodeComponent.h
rename to components/include/v4l2_codec2/components/EncodeComponent.h
index 0b150e44b742d7e2a5ddcee10a4879b8f1ed4d45..81c8c6d28966adc4b2133f748437207090222e10 100644
--- a/components/include/v4l2_codec2/components/V4L2EncodeComponent.h
+++ b/components/include/v4l2_codec2/components/EncodeComponent.h
@@ -1,14 +1,15 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODE_COMPONENT_H
-#define ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODE_COMPONENT_H
+#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_ENCODE_COMPONENT_H
+#define ANDROID_V4L2_CODEC2_COMPONENTS_ENCODE_COMPONENT_H
 
 #include <atomic>
 #include <memory>
 #include <optional>
 #include <unordered_map>
+#include <vector>
 
 #include <C2Component.h>
 #include <C2ComponentFactory.h>
@@ -22,23 +23,24 @@
 #include <base/synchronization/waitable_event.h>
 #include <base/threading/thread.h>
 #include <util/C2InterfaceHelper.h>
+#include <v4l2_codec2/common/Common.h>
+#include <v4l2_codec2/common/VideoPixelFormat.h>
 
 namespace android {
 
 struct BitstreamBuffer;
 class FormatConverter;
 class VideoEncoder;
-class V4L2EncodeInterface;
+class EncodeInterface;
 
-class V4L2EncodeComponent : public C2Component,
-                            public std::enable_shared_from_this<V4L2EncodeComponent> {
-public:
-    // Create a new instance of the V4L2EncodeComponent.
-    static std::shared_ptr<C2Component> create(C2String name, c2_node_id_t id,
-                                               std::shared_ptr<C2ReflectorHelper> helper,
-                                               C2ComponentFactory::ComponentDeleter deleter);
+std::optional<std::vector<VideoFramePlane>> getVideoFrameLayout(const C2ConstGraphicBlock& block,
+                                                                VideoPixelFormat* format);
+
+std::optional<uint32_t> getVideoFrameStride(VideoPixelFormat format, ui::Size size);
 
-    virtual ~V4L2EncodeComponent() override;
+class EncodeComponent : public C2Component, public std::enable_shared_from_this<EncodeComponent> {
+public:
+    virtual ~EncodeComponent() override;
 
     // Implementation of the C2Component interface.
     c2_status_t start() override;
@@ -54,7 +56,7 @@ public:
                                c2_blocking_t mayBlock) override;
     std::shared_ptr<C2ComponentInterface> intf() override;
 
-private:
+protected:
     // Possible component states.
     enum class ComponentState {
         UNLOADED,  // Initial state of component.
@@ -63,11 +65,13 @@ private:
         ERROR,     // An error occurred.
     };
 
-    V4L2EncodeComponent(C2String name, c2_node_id_t id,
-                        std::shared_ptr<V4L2EncodeInterface> interface);
+    EncodeComponent(C2String name, c2_node_id_t id, std::shared_ptr<EncodeInterface> interface);
 
-    V4L2EncodeComponent(const V4L2EncodeComponent&) = delete;
-    V4L2EncodeComponent& operator=(const V4L2EncodeComponent&) = delete;
+    EncodeComponent(const EncodeComponent&) = delete;
+    EncodeComponent& operator=(const EncodeComponent&) = delete;
+
+    // Initialize the V4L2 device for encoding with the requested configuration.
+    virtual bool initializeEncoder() = 0;
 
     // Initialize the encoder on the encoder thread.
     void startTask(bool* success, ::base::WaitableEvent* done);
@@ -87,8 +91,6 @@ private:
     // Set the component listener on the encoder thread.
     void setListenerTask(const std::shared_ptr<Listener>& listener, ::base::WaitableEvent* done);
 
-    // Initialize the V4L2 device for encoding with the requested configuration.
-    bool initializeEncoder();
     // Update the |mBitrate| and |mFramerate| currently configured on the V4L2 device, to match the
     // values requested by the codec 2.0 framework.
     bool updateEncodingParameters();
@@ -132,14 +134,12 @@ private:
     // The underlying V4L2 encoder.
     std::unique_ptr<VideoEncoder> mEncoder;
 
-    // The number of concurrent encoder instances currently created.
-    static std::atomic<int32_t> sConcurrentInstances;
     // The component's registered name.
     const C2String mName;
     // The component's id, provided by the C2 framework upon initialization.
     const c2_node_id_t mId = 0;
     // The component's interface implementation.
-    const std::shared_ptr<V4L2EncodeInterface> mInterface;
+    const std::shared_ptr<EncodeInterface> mInterface;
 
     // Mutex used by the component to synchronize start/stop/reset/release calls, as the codec 2.0
     // API can be accessed from any thread.
@@ -153,6 +153,11 @@ private:
     // An input format convertor will be used if the device doesn't support the video's format.
     std::unique_ptr<FormatConverter> mInputFormatConverter;
 
+    // Pixel format of frames sent to V4L2 encoder, determined when the first input frame is queued.
+    VideoPixelFormat mInputPixelFormat = VideoPixelFormat::UNKNOWN;
+    // Layout of frames sent to V4L2 encoder, determined when the first input frame is queued.
+    std::vector<VideoFramePlane> mInputLayout;
+
     // The bitrate currently configured on the v4l2 device.
     uint32_t mBitrate = 0;
     // The bitrate mode currently configured on the v4l2 device.
@@ -175,15 +180,15 @@ private:
     std::atomic<ComponentState> mComponentState;
 
     // The encoder thread on which all interaction with the V4L2 device is performed.
-    ::base::Thread mEncoderThread{"V4L2EncodeComponentThread"};
+    ::base::Thread mEncoderThread{"EncodeComponentThread"};
     // The task runner on the encoder thread.
     scoped_refptr<::base::SequencedTaskRunner> mEncoderTaskRunner;
 
     // The WeakPtrFactory used to get weak pointers of this.
-    ::base::WeakPtr<V4L2EncodeComponent> mWeakThis;
-    ::base::WeakPtrFactory<V4L2EncodeComponent> mWeakThisFactory{this};
+    ::base::WeakPtr<EncodeComponent> mWeakThis;
+    ::base::WeakPtrFactory<EncodeComponent> mWeakThisFactory{this};
 };
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODE_COMPONENT_H
+#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_ENCODE_COMPONENT_H
diff --git a/components/include/v4l2_codec2/components/V4L2EncodeInterface.h b/components/include/v4l2_codec2/components/EncodeInterface.h
similarity index 88%
rename from components/include/v4l2_codec2/components/V4L2EncodeInterface.h
rename to components/include/v4l2_codec2/components/EncodeInterface.h
index fefebf0fa98713e20a575a149adeac8826ccbb6f..e59f58cfd5e85e98020246de03369f2a166cd8a3 100644
--- a/components/include/v4l2_codec2/components/V4L2EncodeInterface.h
+++ b/components/include/v4l2_codec2/components/EncodeInterface.h
@@ -1,9 +1,9 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2023 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODE_INTERFACE_H
-#define ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODE_INTERFACE_H
+#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_ENCODE_INTERFACE_H
+#define ANDROID_V4L2_CODEC2_COMPONENTS_ENCODE_INTERFACE_H
 
 #include <optional>
 #include <vector>
@@ -14,6 +14,7 @@
 #include <ui/Size.h>
 #include <util/C2InterfaceHelper.h>
 
+#include <v4l2_codec2/common/Common.h>
 #include <v4l2_codec2/common/EncodeHelpers.h>
 
 namespace media {
@@ -24,9 +25,10 @@ namespace android {
 
 // Codec 2.0 interface describing the V4L2EncodeComponent. This interface is used by the codec 2.0
 // framework to query the component's capabilities and request configuration changes.
-class V4L2EncodeInterface : public C2InterfaceHelper {
+class EncodeInterface : public C2InterfaceHelper {
 public:
-    V4L2EncodeInterface(const C2String& name, std::shared_ptr<C2ReflectorHelper> helper);
+    EncodeInterface(const C2String& name, std::shared_ptr<C2ReflectorHelper> helper,
+                    const SupportedCapabilities& caps);
 
     // Interfaces for the V4L2EncodeInterface
     // Note: these getters are not thread-safe. For dynamic parameters, component should use
@@ -53,7 +55,7 @@ public:
     void setFramerate(uint32_t framerate) { mFrameRate->value = framerate; }
 
 protected:
-    void Initialize(const C2String& name);
+    void Initialize(const C2String& name, const SupportedCapabilities& caps);
 
     // Configurable parameter setters.
     static C2R H264ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& info,
@@ -70,6 +72,10 @@ protected:
     static C2R IntraRefreshPeriodSetter(bool mayBlock,
                                         C2P<C2StreamIntraRefreshTuning::output>& period);
 
+    // Lowest level configured so far.
+    // Static because it is accessed from the static H264ProfileLevelSetter callback.
+    static C2Config::level_t lowestConfigLevel;
+
     // Constant parameters
 
     // The kind of the component; should be C2Component::KIND_ENCODER.
@@ -121,4 +127,4 @@ protected:
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODE_INTERFACE_H
+#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_ENCODE_INTERFACE_H
diff --git a/components/include/v4l2_codec2/components/VideoEncoder.h b/components/include/v4l2_codec2/components/VideoEncoder.h
index 7e5a3c24190d48e1755773d60313bfafb511f448..eeb63c2ac2c86450494e1aee1130a9b0f6ac4a6e 100644
--- a/components/include/v4l2_codec2/components/VideoEncoder.h
+++ b/components/include/v4l2_codec2/components/VideoEncoder.h
@@ -22,13 +22,21 @@ struct BitstreamBuffer;
 
 class VideoEncoder {
 public:
+    // Number of buffers for the component's input and output delays.
+    static constexpr size_t kInputBufferCount = 2;
+    static constexpr size_t kOutputBufferCount = 2;
+    static constexpr VideoPixelFormat kInputPixelFormat = VideoPixelFormat::NV12;
+
+    // Multiplier applied to the target bitrate to obtain the peak bitrate when the mode is VBR.
+    static constexpr uint32_t kPeakBitrateMultiplier = 2u;
+
     // The InputFrame class can be used to store raw video frames.
     // Note: The InputFrame does not take ownership of the data. The file descriptor is not
     //       duplicated and the caller is responsible for keeping the data alive until the buffer
     //       is returned by an InputBufferDoneCB() call.
     class InputFrame {
     public:
-        InputFrame(std::vector<int>&& fds, std::vector<VideoFramePlane>&& planes,
+        InputFrame(std::vector<int>&& fds, const std::vector<VideoFramePlane>& planes,
                    VideoPixelFormat pixelFormat, uint64_t index, int64_t timestamp);
         ~InputFrame() = default;
 
diff --git a/plugin_store/Android.bp b/plugin_store/Android.bp
index 6427512453db234b34d61bc745aff2fb9feaf722..4eeb52dd29814ea7406b71658a89b9e927b21fc8 100644
--- a/plugin_store/Android.bp
+++ b/plugin_store/Android.bp
@@ -9,7 +9,7 @@ package {
 
 cc_library_shared {
     name: "libc2plugin_store",
-    vendor: true,
+    vendor_available: true,
 
     defaults: [
         "libcodec2-impl-defaults",
diff --git a/plugin_store/V4L2PluginStore.cpp b/plugin_store/V4L2PluginStore.cpp
index 9f9f2622a29935d6f725afbaf83e68894d0c1118..1ad624cbc4c05674527f4c15a28cf60b38d98810 100644
--- a/plugin_store/V4L2PluginStore.cpp
+++ b/plugin_store/V4L2PluginStore.cpp
@@ -21,14 +21,30 @@
 
 namespace android {
 
+static std::mutex sAllocatorLoaderMutex;
+
+// Using std::optional because, if the library is not available, we do not want to retry the search.
+static std::optional<std::unique_ptr<VendorAllocatorLoader>> sAllocatorLoader = std::nullopt;
+
+const std::unique_ptr<VendorAllocatorLoader>& getAllocatorLoader() {
+    {
+        std::lock_guard<std::mutex> lock(sAllocatorLoaderMutex);
+
+        if (sAllocatorLoader == std::nullopt) {
+            sAllocatorLoader = VendorAllocatorLoader::Create();
+        }
+    }
+
+    return *sAllocatorLoader;
+}
+
 C2Allocator* createAllocator(C2Allocator::id_t allocatorId) {
     ALOGV("%s(allocatorId=%d)", __func__, allocatorId);
-    static std::unique_ptr<VendorAllocatorLoader> sAllocatorLoader =
-            VendorAllocatorLoader::Create();
 
-    if (sAllocatorLoader != nullptr) {
+    auto& allocatorLoader = getAllocatorLoader();
+    if (allocatorLoader != nullptr) {
         ALOGD("%s(): Create C2Allocator (id=%u) from VendorAllocatorLoader", __func__, allocatorId);
-        return sAllocatorLoader->createAllocator(allocatorId);
+        return allocatorLoader->createAllocator(allocatorId);
     }
 
     ALOGI("%s(): Fallback to create C2AllocatorGralloc(id=%u)", __func__, allocatorId);
@@ -59,6 +75,15 @@ std::shared_ptr<C2Allocator> fetchAllocator(C2Allocator::id_t allocatorId) {
 C2BlockPool* createBlockPool(C2Allocator::id_t allocatorId, C2BlockPool::local_id_t poolId) {
     ALOGV("%s(allocatorId=%d, poolId=%" PRIu64 ")", __func__, allocatorId, poolId);
 
+    auto& allocatorLoader = getAllocatorLoader();
+    if (allocatorLoader != nullptr) {
+        ALOGD("%s(): Create C2BlockPool (id=%u) from VendorAllocatorLoader", __func__, allocatorId);
+        C2BlockPool* pool = allocatorLoader->createBlockPool(allocatorId, poolId);
+        if (pool != nullptr) {
+            return pool;
+        }
+    }
+
     std::shared_ptr<C2Allocator> allocator = fetchAllocator(allocatorId);
     if (allocator == nullptr) {
         ALOGE("%s(): Failed to create allocator id=%u", __func__, allocatorId);
diff --git a/plugin_store/VendorAllocatorLoader.cpp b/plugin_store/VendorAllocatorLoader.cpp
index 438df95fa6ecadfe9c722fdee2d1241717191947..6f6bf8c59c44fa4e6a9a7f64b048ead88c5d0b07 100644
--- a/plugin_store/VendorAllocatorLoader.cpp
+++ b/plugin_store/VendorAllocatorLoader.cpp
@@ -8,6 +8,7 @@
 #include <v4l2_codec2/plugin_store/VendorAllocatorLoader.h>
 
 #include <dlfcn.h>
+#include <cinttypes>
 
 #include <log/log.h>
 
@@ -15,6 +16,7 @@ namespace android {
 namespace {
 const char* kLibPath = "libv4l2_codec2_vendor_allocator.so";
 const char* kCreateAllocatorFuncName = "CreateAllocator";
+const char* kCreateBlockPoolFuncName = "CreateBlockPool";
 }  // namespace
 
 // static
@@ -29,18 +31,24 @@ std::unique_ptr<VendorAllocatorLoader> VendorAllocatorLoader::Create() {
 
     auto createAllocatorFunc = (CreateAllocatorFunc)dlsym(libHandle, kCreateAllocatorFuncName);
     if (!createAllocatorFunc) {
-        ALOGE("%s(): Failed to load functions: %s", __func__, kCreateAllocatorFuncName);
-        dlclose(libHandle);
-        return nullptr;
+        ALOGW("%s(): Failed to load functions: %s", __func__, kCreateAllocatorFuncName);
+    }
+
+    auto createBlockPoolFunc = (CreateBlockPoolFunc)dlsym(libHandle, kCreateBlockPoolFuncName);
+    if (!createBlockPoolFunc) {
+        ALOGW("%s(): Failed to load functions: %s", __func__, kCreateBlockPoolFuncName);
     }
 
     return std::unique_ptr<VendorAllocatorLoader>(
-            new VendorAllocatorLoader(libHandle, createAllocatorFunc));
+            new VendorAllocatorLoader(libHandle, createAllocatorFunc, createBlockPoolFunc));
 }
 
 VendorAllocatorLoader::VendorAllocatorLoader(void* libHandle,
-                                             CreateAllocatorFunc createAllocatorFunc)
-      : mLibHandle(libHandle), mCreateAllocatorFunc(createAllocatorFunc) {
+                                             CreateAllocatorFunc createAllocatorFunc,
+                                             CreateBlockPoolFunc createBlockPoolFunc)
+      : mLibHandle(libHandle),
+        mCreateAllocatorFunc(createAllocatorFunc),
+        mCreateBlockPoolFunc(createBlockPoolFunc) {
     ALOGV("%s()", __func__);
 }
 
@@ -53,7 +61,22 @@ VendorAllocatorLoader::~VendorAllocatorLoader() {
 C2Allocator* VendorAllocatorLoader::createAllocator(C2Allocator::id_t allocatorId) {
     ALOGV("%s(%d)", __func__, allocatorId);
 
+    if (!mCreateAllocatorFunc) {
+        return nullptr;
+    }
+
     return mCreateAllocatorFunc(allocatorId);
 }
 
+C2BlockPool* VendorAllocatorLoader::createBlockPool(C2Allocator::id_t allocatorId,
+                                                    C2BlockPool::local_id_t poolId) {
+    ALOGV("%s(allocatorId=%d, poolId=%" PRIu64 " )", __func__, allocatorId, poolId);
+
+    if (!mCreateBlockPoolFunc) {
+        return nullptr;
+    }
+
+    return mCreateBlockPoolFunc(allocatorId, poolId);
+}
+
 }  // namespace android
diff --git a/plugin_store/include/v4l2_codec2/plugin_store/VendorAllocatorLoader.h b/plugin_store/include/v4l2_codec2/plugin_store/VendorAllocatorLoader.h
index f3f7613d51a2cc3400a57b6b9433447ed86442eb..9cbee0fb45126bb4a1dafc9aaabf9fd5a43e6634 100644
--- a/plugin_store/include/v4l2_codec2/plugin_store/VendorAllocatorLoader.h
+++ b/plugin_store/include/v4l2_codec2/plugin_store/VendorAllocatorLoader.h
@@ -18,6 +18,8 @@ namespace android {
 class VendorAllocatorLoader {
 public:
     using CreateAllocatorFunc = ::C2Allocator* (*)(C2Allocator::id_t /* allocatorId */);
+    using CreateBlockPoolFunc = ::C2BlockPool* (*)(C2Allocator::id_t /* allocatorId */,
+                                                   C2BlockPool::local_id_t /* poolId */);
 
     static std::unique_ptr<VendorAllocatorLoader> Create();
     ~VendorAllocatorLoader();
@@ -26,11 +28,15 @@ public:
     // V4L2AllocatorId.h.
     C2Allocator* createAllocator(C2Allocator::id_t allocatorId);
 
+    C2BlockPool* createBlockPool(C2Allocator::id_t allocatorId, C2BlockPool::local_id_t poolId);
+
 private:
-    VendorAllocatorLoader(void* libHandle, CreateAllocatorFunc createAllocatorFunc);
+    VendorAllocatorLoader(void* libHandle, CreateAllocatorFunc createAllocatorFunc,
+                          CreateBlockPoolFunc createBlockPoolFunc);
 
     void* mLibHandle;
     CreateAllocatorFunc mCreateAllocatorFunc;
+    CreateBlockPoolFunc mCreateBlockPoolFunc;
 };
 
 }  // namespace android
diff --git a/service/Android.bp b/service/Android.bp
index d2d70c77a4335744e812370e1832bc98b3b8dc21..7a0bc4123452b11aa950b7d651040c5251eb688d 100644
--- a/service/Android.bp
+++ b/service/Android.bp
@@ -7,8 +7,8 @@ package {
     default_applicable_licenses: ["external_v4l2_codec2_license"],
 }
 
-cc_binary {
-    name: "android.hardware.media.c2@1.2-service-v4l2",
+cc_defaults {
+    name: "libv4l2_codec2_service",
 
     defaults: [
         "hidl_defaults",
@@ -22,6 +22,7 @@ cc_binary {
     ],
 
     shared_libs: [
+        "libv4l2_codec2_v4l2",
         "libv4l2_codec2_components",
         "libavservices_minijail",
         "libchrome",
@@ -32,6 +33,22 @@ cc_binary {
     ],
 
     required: ["android.hardware.media.c2-default-seccomp_policy"],
+}
+
+cc_binary {
+    name: "android.hardware.media.c2@1.2-service-v4l2",
+
+    defaults: [
+        "libv4l2_codec2_service",
+    ],
+
+    shared_libs: [
+        "libv4l2_codec2_v4l2",
+    ],
+
+    cflags: [
+        "-DV4L2_CODEC2_SERVICE_V4L2_STORE",
+    ],
 
     compile_multilib: "both",
     multilib: {
diff --git a/service/service.cpp b/service/service.cpp
index bde1521fc1f1ce5909a392a1fa2f32705adc96dc..03865ef95a73cd6606c896e8cc884e4fcc3b7a3a 100644
--- a/service/service.cpp
+++ b/service/service.cpp
@@ -3,7 +3,11 @@
 // found in the LICENSE file.
 
 //#define LOG_NDEBUG 0
+#ifdef V4L2_CODEC2_SERVICE_V4L2_STORE
 #define LOG_TAG "android.hardware.media.c2@1.0-service-v4l2"
+#else
+#error "V4L2_CODEC2_SERVICE_V4L2_STORE has to be defined"
+#endif
 
 #include <C2Component.h>
 #include <base/logging.h>
@@ -12,7 +16,9 @@
 #include <log/log.h>
 #include <minijail.h>
 
-#include <v4l2_codec2/components/V4L2ComponentStore.h>
+#ifdef V4L2_CODEC2_SERVICE_V4L2_STORE
+#include <v4l2_codec2/v4l2/V4L2ComponentStore.h>
+#endif
 
 // This is the absolute on-device path of the prebuild_etc module
 // "android.hardware.media.c2-default-seccomp_policy" in Android.bp.
@@ -34,7 +40,7 @@ int main(int /* argc */, char** /* argv */) {
 
     // Extra threads may be needed to handle a stacked IPC sequence that
     // contains alternating binder and hwbinder calls. (See b/35283480.)
-    android::hardware::configureRpcThreadpool(8, true /* callerWillJoin */);
+    android::hardware::configureRpcThreadpool(16, true /* callerWillJoin */);
 
 #if LOG_NDEBUG == 0
     ALOGD("Enable all verbose logging of libchrome");
@@ -44,12 +50,15 @@ int main(int /* argc */, char** /* argv */) {
     // Create IComponentStore service.
     {
         using namespace ::android::hardware::media::c2::V1_2;
+        android::sp<IComponentStore> store = nullptr;
 
+#ifdef V4L2_CODEC2_SERVICE_V4L2_STORE
         ALOGD("Instantiating Codec2's V4L2 IComponentStore service...");
-        android::sp<IComponentStore> store(
-                new utils::ComponentStore(android::V4L2ComponentStore::Create()));
+        store = new utils::ComponentStore(android::V4L2ComponentStore::Create());
+#endif
+
         if (store == nullptr) {
-            ALOGE("Cannot create Codec2's V4L2 IComponentStore service.");
+            ALOGE("Cannot create Codec2's IComponentStore service.");
         } else if (store->registerAsService("default") != android::OK) {
             ALOGE("Cannot register Codec2's IComponentStore service.");
         } else {
diff --git a/tests/c2_comp_intf/C2VEACompIntf_test.cpp b/tests/c2_comp_intf/C2VEACompIntf_test.cpp
index 4a07b9487493a6b6434a0d6311e664ad90eb8e6c..cc1a5c019f8c96e0c8f9dd04ff91cfd297f6be8d 100644
--- a/tests/c2_comp_intf/C2VEACompIntf_test.cpp
+++ b/tests/c2_comp_intf/C2VEACompIntf_test.cpp
@@ -35,8 +35,8 @@ class C2VEACompIntfTest: public C2CompIntfTest {
 protected:
     C2VEACompIntfTest() {
         mReflector = std::make_shared<C2ReflectorHelper>();
-        auto componentInterface = std::make_shared<V4L2EncodeInterface>(testCompName, mReflector);
-        mIntf = std::shared_ptr<C2ComponentInterface>(new SimpleInterface<V4L2EncodeInterface>(
+        auto componentInterface = std::make_shared<EncodeInterface>(testCompName, mReflector);
+        mIntf = std::shared_ptr<C2ComponentInterface>(new SimpleInterface<EncodeInterface>(
                 testCompName, testCompNodeId, componentInterface));
     }
     ~C2VEACompIntfTest() override {
diff --git a/tests/c2_e2e_test/jni/common.cpp b/tests/c2_e2e_test/jni/common.cpp
index 673e36c62e74a4e3c86413bce3acbf095156d087..75fa1f768ed0465a0e4282df13783f146fc6c1c8 100644
--- a/tests/c2_e2e_test/jni/common.cpp
+++ b/tests/c2_e2e_test/jni/common.cpp
@@ -171,6 +171,18 @@ bool OutputFile::WriteFrame(uint32_t data_size, const uint8_t* data) {
     }
 }
 
+// Reference: (https://source.chromium.org/chromium/chromium/src/+/main:
+//             media/gpu/video_decode_accelerator_perf_tests.cc
+PerformanceTimeStats::PerformanceTimeStats(const std::vector<double>& times) {
+    if (times.empty()) return;  // Keep zero-initialized stats; avoids div-by-zero and OOB indexing.
+    avg_us_ = std::accumulate(times.begin(), times.end(), 0.0) / times.size();
+    std::vector<double> sorted_times = times;
+    std::sort(sorted_times.begin(), sorted_times.end());
+    percentile_25_us_ = sorted_times[sorted_times.size() / 4];
+    percentile_50_us_ = sorted_times[sorted_times.size() / 2];
+    percentile_75_us_ = sorted_times[(sorted_times.size() * 3) / 4];
+}
+
 bool FPSCalculator::RecordFrameTimeDiff() {
     int64_t now_us = GetNowUs();
     if (last_frame_time_us_ != 0) {
@@ -198,6 +209,10 @@ double FPSCalculator::CalculateFPS() const {
     return 1E6 / moving_avgs[index];
 }
 
+PerformanceTimeStats FPSCalculator::CalucalateDeliveryTimeStats() const {
+    return PerformanceTimeStats(frame_time_diffs_us_);
+}
+
 // Reference: (https://cs.corp.google.com/android/cts/common/device-side/util/
 //             src/com/android/compatibility/common/util/MediaUtils.java)
 //            movingAverageOverSum
diff --git a/tests/c2_e2e_test/jni/common.h b/tests/c2_e2e_test/jni/common.h
index b28fd3a40143c5c554f1216a5d5f6ce83ec5ff47..1425b620b30423b258b5e81e98762dfc93424755 100644
--- a/tests/c2_e2e_test/jni/common.h
+++ b/tests/c2_e2e_test/jni/common.h
@@ -142,6 +142,15 @@ private:
     uint64_t frame_index_ = 0;
 };
 
+struct PerformanceTimeStats {
+    PerformanceTimeStats() {}
+    explicit PerformanceTimeStats(const std::vector<double>& times);
+    double avg_us_ = 0.0;
+    double percentile_25_us_ = 0.0;
+    double percentile_50_us_ = 0.0;
+    double percentile_75_us_ = 0.0;
+};
+
 // The helper class to calculate FPS.
 class FPSCalculator {
 public:
@@ -152,6 +161,9 @@ public:
     // Calucalate FPS value.
     double CalculateFPS() const;
 
+    // Calculate delivery time stats.
+    PerformanceTimeStats CalucalateDeliveryTimeStats() const;
+
 private:
     static constexpr double kMovingAvgWindowUs = 1000000;
     static constexpr double kRegardedPercentile = 95;
diff --git a/tests/c2_e2e_test/jni/video_decoder_e2e_test.cpp b/tests/c2_e2e_test/jni/video_decoder_e2e_test.cpp
index 62589c2818a482520ecb0114b48d6dd3a1407038..5dc5c5b37264ab1632aa587f7c790b8cc4fefe3b 100644
--- a/tests/c2_e2e_test/jni/video_decoder_e2e_test.cpp
+++ b/tests/c2_e2e_test/jni/video_decoder_e2e_test.cpp
@@ -360,6 +360,15 @@ void C2VideoDecoderE2ETest::TestFPSBody() {
     printf("[LOG] Measured decoder FPS: %.4f\n", fps);
     EXPECT_GE(fps, static_cast<double>(g_env->min_fps_no_render()));
     printf("[LOG] Dropped frames rate: %lf\n", decoder_->dropped_frame_rate());
+
+    auto delivery_time = fps_calculator.CalucalateDeliveryTimeStats();
+    printf("[LOG] Measured delivery time average: %.4f\n", delivery_time.avg_us_ / 1E3);
+    printf("[LOG] Measured delivery time 25 percentile: %.4f\n",
+           delivery_time.percentile_25_us_ / 1E3);
+    printf("[LOG] Measured delivery time 50 percentile: %.4f\n",
+           delivery_time.percentile_50_us_ / 1E3);
+    printf("[LOG] Measured delivery time 75 percentile: %.4f\n",
+           delivery_time.percentile_75_us_ / 1E3);
 }
 
 TEST_F(C2VideoDecoderSurfaceE2ETest, TestFPS) {
diff --git a/v4l2/Android.bp b/v4l2/Android.bp
new file mode 100644
index 0000000000000000000000000000000000000000..fe8043bb04f6d3d2cf7a0ae5d3ed5284c094fa87
--- /dev/null
+++ b/v4l2/Android.bp
@@ -0,0 +1,57 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "external_v4l2_codec2_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-BSD
+    default_applicable_licenses: ["external_v4l2_codec2_license"],
+}
+
+cc_library {
+    name: "libv4l2_codec2_v4l2",
+    vendor: true,
+
+    defaults: [
+        "libcodec2-hidl-defaults",
+    ],
+
+    srcs: [
+        "V4L2ComponentCommon.cpp",
+        "V4L2ComponentFactory.cpp",
+        "V4L2ComponentStore.cpp",
+        "V4L2Decoder.cpp",
+        "V4L2DecodeComponent.cpp",
+        "V4L2Device.cpp",
+        "V4L2DevicePoller.cpp",
+        "V4L2Encoder.cpp",
+        "V4L2EncodeComponent.cpp",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+
+    header_libs: [
+        "libcodec2_internal",
+    ],
+    shared_libs: [
+        "android.hardware.graphics.common@1.0",
+        "libc2plugin_store",
+        "libchrome",
+        "libcodec2_soft_common",
+        "libcutils",
+        "liblog",
+        "libsfplugin_ccodec_utils",
+        "libstagefright_bufferqueue_helper",
+        "libstagefright_foundation",
+        "libui",
+        "libv4l2_codec2_common",
+        "libv4l2_codec2_components",
+    ],
+
+    cflags: [
+      "-Werror",
+      "-Wall",
+      "-Wno-unused-parameter",  // needed for libchrome/base codes
+      "-Wthread-safety",
+    ],
+}
diff --git a/common/V4L2ComponentCommon.cpp b/v4l2/V4L2ComponentCommon.cpp
similarity index 53%
rename from common/V4L2ComponentCommon.cpp
rename to v4l2/V4L2ComponentCommon.cpp
index f67a516dfa753a79f12a522393a95529ec4d395c..9520341a0dc8c82604444659bb72cbdd9b603dac 100644
--- a/common/V4L2ComponentCommon.cpp
+++ b/v4l2/V4L2ComponentCommon.cpp
@@ -5,9 +5,10 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "V4L2ComponentCommon"
 
-#include <v4l2_codec2/common/V4L2ComponentCommon.h>
+#include <v4l2_codec2/v4l2/V4L2ComponentCommon.h>
 
 #include <log/log.h>
+#include <set>
 
 namespace android {
 
@@ -25,7 +26,7 @@ const std::string V4L2ComponentName::kVP9SecureDecoder = "c2.v4l2.vp9.decoder.se
 const std::string V4L2ComponentName::kHEVCSecureDecoder = "c2.v4l2.hevc.decoder.secure";
 
 // static
-bool V4L2ComponentName::isValid(const char* name) {
+bool V4L2ComponentName::isValid(const std::string& name) {
     return name == kH264Encoder || name == kVP8Encoder || name == kVP9Encoder ||
            name == kH264Decoder || name == kVP8Decoder || name == kVP9Decoder ||
            name == kHEVCDecoder || name == kH264SecureDecoder || name == kVP8SecureDecoder ||
@@ -33,10 +34,44 @@ bool V4L2ComponentName::isValid(const char* name) {
 }
 
 // static
-bool V4L2ComponentName::isEncoder(const char* name) {
+bool V4L2ComponentName::isEncoder(const std::string& name) {
     ALOG_ASSERT(isValid(name));
 
     return name == kH264Encoder || name == kVP8Encoder || name == kVP9Encoder;
 }
 
+// static
+bool V4L2ComponentName::isDecoder(const std::string& name) {
+    ALOG_ASSERT(isValid(name));
+    static const std::set<std::string> kValidDecoders = {
+            kH264Decoder, kH264SecureDecoder, kVP8Decoder,  kVP8SecureDecoder,
+            kVP9Decoder,  kVP9SecureDecoder,  kHEVCDecoder, kHEVCSecureDecoder,
+    };
+
+    return kValidDecoders.find(name) != kValidDecoders.end();
+}
+
+// static
+std::optional<VideoCodec> V4L2ComponentName::getCodec(const std::string& name) {
+    ALOG_ASSERT(isValid(name));
+    static const std::map<std::string, VideoCodec> kNameToCodecs = {
+            {kH264Decoder, VideoCodec::H264}, {kH264SecureDecoder, VideoCodec::H264},
+            {kH264Encoder, VideoCodec::H264},
+
+            {kVP8Decoder, VideoCodec::VP8},   {kVP8SecureDecoder, VideoCodec::VP8},
+            {kVP8Encoder, VideoCodec::VP8},
+
+            {kVP9Decoder, VideoCodec::VP9},   {kVP9SecureDecoder, VideoCodec::VP9},
+            {kVP9Encoder, VideoCodec::VP9},
+
+            {kHEVCDecoder, VideoCodec::HEVC}, {kHEVCSecureDecoder, VideoCodec::HEVC},
+    };
+
+    auto iter = kNameToCodecs.find(name);
+    if (iter == kNameToCodecs.end()) {
+        return std::nullopt;
+    }
+    return iter->second;
+}
+
 }  // namespace android
diff --git a/components/V4L2ComponentFactory.cpp b/v4l2/V4L2ComponentFactory.cpp
similarity index 55%
rename from components/V4L2ComponentFactory.cpp
rename to v4l2/V4L2ComponentFactory.cpp
index a3f883706ca45b897dd09f34f8cda545da1c4435..4493a22daeef46d526e65c1814a2c8206720fac3 100644
--- a/components/V4L2ComponentFactory.cpp
+++ b/v4l2/V4L2ComponentFactory.cpp
@@ -5,16 +5,18 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "V4L2ComponentFactory"
 
-#include <v4l2_codec2/components/V4L2ComponentFactory.h>
+#include <v4l2_codec2/v4l2/V4L2ComponentFactory.h>
 
 #include <codec2/hidl/1.0/InputBufferManager.h>
 #include <log/log.h>
 
-#include <v4l2_codec2/common/V4L2ComponentCommon.h>
-#include <v4l2_codec2/components/V4L2DecodeComponent.h>
-#include <v4l2_codec2/components/V4L2DecodeInterface.h>
-#include <v4l2_codec2/components/V4L2EncodeComponent.h>
-#include <v4l2_codec2/components/V4L2EncodeInterface.h>
+#include <v4l2_codec2/common/Common.h>
+#include <v4l2_codec2/components/DecodeInterface.h>
+#include <v4l2_codec2/components/EncodeInterface.h>
+#include <v4l2_codec2/v4l2/V4L2ComponentCommon.h>
+#include <v4l2_codec2/v4l2/V4L2DecodeComponent.h>
+#include <v4l2_codec2/v4l2/V4L2Device.h>
+#include <v4l2_codec2/v4l2/V4L2EncodeComponent.h>
 
 namespace android {
 
@@ -52,7 +54,7 @@ V4L2ComponentFactory::V4L2ComponentFactory(const std::string& componentName, boo
     constexpr nsecs_t kMinFrameIntervalNs = 1000000000ull / 60;
     uint32_t delayCount = 0;
     for (auto c : kAllCodecs) {
-        delayCount = std::max(delayCount, V4L2DecodeInterface::getOutputDelay(c));
+        delayCount = std::max(delayCount, DecodeInterface::getOutputDelay(c));
     }
     utils::InputBufferManager::setNotificationInterval(delayCount * kMinFrameIntervalNs / 2);
 }
@@ -69,9 +71,21 @@ c2_status_t V4L2ComponentFactory::createComponent(c2_node_id_t id,
     }
 
     if (mIsEncoder) {
-        *component = V4L2EncodeComponent::create(mComponentName, id, mReflector, deleter);
+        std::shared_ptr<EncodeInterface> intfImpl;
+        c2_status_t status = createEncodeInterface(&intfImpl);
+        if (status != C2_OK) {
+            return status;
+        }
+
+        *component = V4L2EncodeComponent::create(mComponentName, id, std::move(intfImpl), deleter);
     } else {
-        *component = V4L2DecodeComponent::create(mComponentName, id, mReflector, deleter);
+        std::shared_ptr<DecodeInterface> intfImpl;
+        c2_status_t status = createDecodeInterface(&intfImpl);
+        if (status != C2_OK) {
+            return status;
+        }
+
+        *component = V4L2DecodeComponent::create(mComponentName, id, std::move(intfImpl), deleter);
     }
     return *component ? C2_OK : C2_NO_MEMORY;
 }
@@ -87,20 +101,68 @@ c2_status_t V4L2ComponentFactory::createInterface(
     }
 
     if (mIsEncoder) {
+        std::shared_ptr<EncodeInterface> intfImpl;
+        c2_status_t status = createEncodeInterface(&intfImpl);
+        if (status != C2_OK) {
+            return status;
+        }
+
         *interface = std::shared_ptr<C2ComponentInterface>(
-                new SimpleInterface<V4L2EncodeInterface>(
-                        mComponentName.c_str(), id,
-                        std::make_shared<V4L2EncodeInterface>(mComponentName, mReflector)),
+                new SimpleInterface<EncodeInterface>(mComponentName.c_str(), id,
+                                                     std::move(intfImpl)),
                 deleter);
         return C2_OK;
     } else {
+        std::shared_ptr<DecodeInterface> intfImpl;
+        c2_status_t status = createDecodeInterface(&intfImpl);
+        if (status != C2_OK) {
+            return status;
+        }
+
         *interface = std::shared_ptr<C2ComponentInterface>(
-                new SimpleInterface<V4L2DecodeInterface>(
-                        mComponentName.c_str(), id,
-                        std::make_shared<V4L2DecodeInterface>(mComponentName, mReflector)),
+                new SimpleInterface<DecodeInterface>(mComponentName.c_str(), id,
+                                                     std::move(intfImpl)),
                 deleter);
         return C2_OK;
     }
 }
 
+c2_status_t V4L2ComponentFactory::createEncodeInterface(
+        std::shared_ptr<EncodeInterface>* intfImpl) {
+    if (!mCapabilites) {
+        auto codec = V4L2ComponentName::getCodec(mComponentName);
+        if (!codec) {
+            return C2_CORRUPTED;
+        }
+        mCapabilites = std::make_unique<SupportedCapabilities>(
+                V4L2Device::queryEncodingCapabilities(*codec));
+    }
+
+    *intfImpl = std::make_shared<EncodeInterface>(mComponentName, mReflector, *mCapabilites);
+    if (*intfImpl == nullptr) {
+        return C2_NO_MEMORY;
+    }
+
+    return (*intfImpl)->status();
+}
+
+c2_status_t V4L2ComponentFactory::createDecodeInterface(
+        std::shared_ptr<DecodeInterface>* intfImpl) {
+    if (!mCapabilites) {
+        auto codec = V4L2ComponentName::getCodec(mComponentName);
+        if (!codec) {
+            return C2_CORRUPTED;
+        }
+        mCapabilites = std::make_unique<SupportedCapabilities>(
+                V4L2Device::queryDecodingCapabilities(*codec));
+    }
+
+    *intfImpl = std::make_shared<DecodeInterface>(mComponentName, mReflector, *mCapabilites);
+    if (*intfImpl == nullptr) {
+        return C2_NO_MEMORY;
+    }
+
+    return (*intfImpl)->status();
+}
+
 }  // namespace android
diff --git a/v4l2/V4L2ComponentStore.cpp b/v4l2/V4L2ComponentStore.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..927a4c04c1565056d6865bce9d4263bf2ce6a92b
--- /dev/null
+++ b/v4l2/V4L2ComponentStore.cpp
@@ -0,0 +1,65 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "V4L2ComponentStore"
+
+#include <v4l2_codec2/v4l2/V4L2ComponentStore.h>
+
+#include <stdint.h>
+
+#include <memory>
+#include <mutex>
+
+#include <C2.h>
+#include <C2Config.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <v4l2_codec2/components/ComponentStore.h>
+#include <v4l2_codec2/v4l2/V4L2ComponentCommon.h>
+#include <v4l2_codec2/v4l2/V4L2ComponentFactory.h>
+
+namespace android {
+
+// static
+std::shared_ptr<C2ComponentStore> V4L2ComponentStore::Create() {
+    ALOGV("%s()", __func__);
+
+    static std::mutex mutex;
+    static std::weak_ptr<C2ComponentStore> platformStore;
+
+    std::lock_guard<std::mutex> lock(mutex);
+    std::shared_ptr<C2ComponentStore> store = platformStore.lock();
+    if (store != nullptr) return store;
+
+    auto builder = ComponentStore::Builder("android.componentStore.v4l2");
+
+    builder.encoder(V4L2ComponentName::kH264Encoder, VideoCodec::H264,
+                    &V4L2ComponentFactory::create);
+    builder.encoder(V4L2ComponentName::kVP8Encoder, VideoCodec::VP8, &V4L2ComponentFactory::create);
+    builder.encoder(V4L2ComponentName::kVP9Encoder, VideoCodec::VP9, &V4L2ComponentFactory::create);
+
+    builder.decoder(V4L2ComponentName::kH264Decoder, VideoCodec::H264,
+                    &V4L2ComponentFactory::create);
+    builder.decoder(V4L2ComponentName::kVP8Decoder, VideoCodec::VP8, &V4L2ComponentFactory::create);
+    builder.decoder(V4L2ComponentName::kVP9Decoder, VideoCodec::VP9, &V4L2ComponentFactory::create);
+    builder.decoder(V4L2ComponentName::kHEVCDecoder, VideoCodec::HEVC,
+                    &V4L2ComponentFactory::create);
+
+    builder.decoder(V4L2ComponentName::kH264SecureDecoder, VideoCodec::H264,
+                    &V4L2ComponentFactory::create);
+    builder.decoder(V4L2ComponentName::kVP8SecureDecoder, VideoCodec::VP8,
+                    &V4L2ComponentFactory::create);
+    builder.decoder(V4L2ComponentName::kVP9SecureDecoder, VideoCodec::VP9,
+                    &V4L2ComponentFactory::create);
+    builder.decoder(V4L2ComponentName::kHEVCSecureDecoder, VideoCodec::HEVC,
+                    &V4L2ComponentFactory::create);
+
+    store = std::shared_ptr<C2ComponentStore>(std::move(builder).build());
+    platformStore = store;
+    return store;
+}
+
+}  // namespace android
diff --git a/v4l2/V4L2DecodeComponent.cpp b/v4l2/V4L2DecodeComponent.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..284b094bdaad3f237adc51a2462e4fe8e9b09285
--- /dev/null
+++ b/v4l2/V4L2DecodeComponent.cpp
@@ -0,0 +1,124 @@
+// Copyright 2023 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "V4L2DecodeComponent"
+
+#include <v4l2_codec2/v4l2/V4L2DecodeComponent.h>
+#include <v4l2_codec2/v4l2/V4L2Decoder.h>
+
+#include <base/bind.h>
+#include <base/callback_helpers.h>
+
+#include <cutils/properties.h>
+#include <utils/Trace.h>
+
+namespace android {
+
+namespace {
+// CCBC pauses sending input buffers to the component when all the output slots are filled by
+// pending decoded buffers. If the available output buffers are exhausted before CCBC pauses sending
+// input buffers, CCodec may timeout due to waiting for an available output buffer.
+// This function returns the minimum number of output buffers to prevent the buffers from being
+// exhausted before CCBC pauses sending input buffers.
+size_t getMinNumOutputBuffers(VideoCodec codec) {
+    // The constant values copied from CCodecBufferChannel.cpp.
+    // (b/184020290): Check the value still sync when seeing error message from CCodec:
+    // "previous call to queue exceeded timeout".
+    constexpr size_t kSmoothnessFactor = 4;
+    constexpr size_t kRenderingDepth = 3;
+    // Extra number of needed output buffers for V4L2Decoder.
+    constexpr size_t kExtraNumOutputBuffersForDecoder = 2;
+
+    // The total needed number of output buffers at pipeline are:
+    // - MediaCodec output slots: output delay + kSmoothnessFactor
+    // - Surface: kRenderingDepth
+    // - Component: kExtraNumOutputBuffersForDecoder
+    return DecodeInterface::getOutputDelay(codec) + kSmoothnessFactor + kRenderingDepth +
+           kExtraNumOutputBuffersForDecoder;
+}
+}  // namespace
+
+// static
+std::atomic<int32_t> V4L2DecodeComponent::sConcurrentInstances = 0;
+
+// static
+std::atomic<uint32_t> V4L2DecodeComponent::sNextDebugStreamId = 0;
+
+// static
+std::shared_ptr<C2Component> V4L2DecodeComponent::create(
+        const std::string& name, c2_node_id_t id, std::shared_ptr<DecodeInterface> intfImpl,
+        C2ComponentFactory::ComponentDeleter deleter) {
+    static const int32_t kMaxConcurrentInstances =
+            property_get_int32("ro.vendor.v4l2_codec2.decode_concurrent_instances", -1);
+    static std::mutex mutex;
+
+    std::lock_guard<std::mutex> lock(mutex);
+
+    if (kMaxConcurrentInstances >= 0 && sConcurrentInstances.load() >= kMaxConcurrentInstances) {
+        ALOGW("Reject to Initialize() due to too many instances: %d", sConcurrentInstances.load());
+        return nullptr;
+    } else if (sConcurrentInstances.load() == 0) {
+        sNextDebugStreamId.store(0, std::memory_order_relaxed);
+    }
+
+    uint32_t debugStreamId = sNextDebugStreamId.fetch_add(1, std::memory_order_relaxed);
+    return std::shared_ptr<C2Component>(
+            new V4L2DecodeComponent(debugStreamId, name, id, std::move(intfImpl)), deleter);
+}
+
+V4L2DecodeComponent::V4L2DecodeComponent(uint32_t debugStreamId, const std::string& name,
+                                         c2_node_id_t id, std::shared_ptr<DecodeInterface> intfImpl)
+      : DecodeComponent(debugStreamId, name, id, intfImpl) {
+    ALOGV("%s(): ", __func__);
+    sConcurrentInstances.fetch_add(1, std::memory_order_relaxed);
+}
+
+V4L2DecodeComponent::~V4L2DecodeComponent() {
+    ALOGV("%s(): ", __func__);
+    sConcurrentInstances.fetch_sub(1, std::memory_order_relaxed);
+}
+
+void V4L2DecodeComponent::startTask(c2_status_t* status, ::base::WaitableEvent* done) {
+    ATRACE_CALL();
+    ALOGV("%s()", __func__);
+    ALOG_ASSERT(mDecoderTaskRunner->RunsTasksInCurrentSequence());
+
+    ::base::ScopedClosureRunner done_caller(
+            ::base::BindOnce(&::base::WaitableEvent::Signal, ::base::Unretained(done)));
+    *status = C2_CORRUPTED;
+
+    const auto codec = mIntfImpl->getVideoCodec();
+    if (!codec) {
+        ALOGE("Failed to get video codec.");
+        return;
+    }
+    const size_t inputBufferSize = mIntfImpl->getInputBufferSize();
+    const size_t minNumOutputBuffers = getMinNumOutputBuffers(*codec);
+
+    // ::base::Unretained(this) is safe here because |mDecoder| is always destroyed before
+    // |mDecoderThread| is stopped, so |*this| is always valid during |mDecoder|'s lifetime.
+    mDecoder = V4L2Decoder::Create(mDebugStreamId, *codec, inputBufferSize, minNumOutputBuffers,
+                                   ::base::BindRepeating(&V4L2DecodeComponent::getVideoFramePool,
+                                                         ::base::Unretained(this)),
+                                   ::base::BindRepeating(&V4L2DecodeComponent::onOutputFrameReady,
+                                                         ::base::Unretained(this)),
+                                   ::base::BindRepeating(&V4L2DecodeComponent::reportError,
+                                                         ::base::Unretained(this), C2_CORRUPTED),
+                                   mDecoderTaskRunner, mIsSecure);
+    if (!mDecoder) {
+        ALOGE("Failed to create V4L2Decoder for %s", VideoCodecToString(*codec));
+        return;
+    }
+
+    // Get default color aspects on start.
+    if (!mIsSecure && *codec == VideoCodec::H264) {
+        if (mIntfImpl->queryColorAspects(&mCurrentColorAspects) != C2_OK) return;
+        mPendingColorAspectsChange = false;
+    }
+
+    *status = C2_OK;
+}
+
+}  // namespace android
diff --git a/components/V4L2Decoder.cpp b/v4l2/V4L2Decoder.cpp
similarity index 68%
rename from components/V4L2Decoder.cpp
rename to v4l2/V4L2Decoder.cpp
index cc2c1d19f9cda9987ccffeabf2b07ecfde639d6c..d0d862df6aff36de5969e60bc69f05422d1e63f7 100644
--- a/components/V4L2Decoder.cpp
+++ b/v4l2/V4L2Decoder.cpp
@@ -3,9 +3,10 @@
 // found in the LICENSE file.
 
 //#define LOG_NDEBUG 0
+#define ATRACE_TAG ATRACE_TAG_VIDEO
 #define LOG_TAG "V4L2Decoder"
 
-#include <v4l2_codec2/components/V4L2Decoder.h>
+#include <v4l2_codec2/v4l2/V4L2Decoder.h>
 
 #include <stdint.h>
 
@@ -16,55 +17,69 @@
 #include <base/files/scoped_file.h>
 #include <base/memory/ptr_util.h>
 #include <log/log.h>
+#include <utils/Trace.h>
 
 #include <v4l2_codec2/common/Common.h>
 #include <v4l2_codec2/common/Fourcc.h>
+#include <v4l2_codec2/common/H264NalParser.h>
+#include <v4l2_codec2/common/HEVCNalParser.h>
+#include <v4l2_codec2/plugin_store/DmabufHelpers.h>
 
 namespace android {
 namespace {
 
-constexpr size_t kNumInputBuffers = 16;
 // Extra buffers for transmitting in the whole video pipeline.
 constexpr size_t kNumExtraOutputBuffers = 4;
 
-// Currently we only support flexible pixel 420 format YCBCR_420_888 in Android.
-// Here is the list of flexible 420 format.
-constexpr std::initializer_list<uint32_t> kSupportedOutputFourccs = {
-        Fourcc::YU12, Fourcc::YV12, Fourcc::YM12, Fourcc::YM21,
-        Fourcc::NV12, Fourcc::NV21, Fourcc::NM12, Fourcc::NM21,
-};
-
-uint32_t VideoCodecToV4L2PixFmt(VideoCodec codec) {
-    switch (codec) {
-    case VideoCodec::H264:
-        return V4L2_PIX_FMT_H264;
-    case VideoCodec::VP8:
-        return V4L2_PIX_FMT_VP8;
+bool waitForDRC(const C2ConstLinearBlock& input, std::optional<VideoCodec> codec) {
+    C2ReadView view = input.map().get();
+    const uint8_t* pos = view.data();
+    // frame type takes the (2) position in first byte of VP9  uncompressed header
+    const uint8_t kVP9FrameTypeMask = 0x4;
+    // frame type takes the (0) position in first byte of VP8 uncompressed header
+    const uint8_t kVP8FrameTypeMask = 0x1;
+
+    switch (*codec) {
+    case VideoCodec::H264: {
+        H264NalParser parser(view.data(), view.capacity());
+        return parser.locateIDR();
+    }
+    case VideoCodec::HEVC: {
+        HEVCNalParser parser(view.data(), view.capacity());
+        return parser.locateIDR();
+    }
+    // For VP8 and VP9 it is assumed that the input buffer contains a single
+    // frame that is not fragmented.
     case VideoCodec::VP9:
-        return V4L2_PIX_FMT_VP9;
-    case VideoCodec::HEVC:
-        return V4L2_PIX_FMT_HEVC;
+        // 0 - key frame; 1 - interframe
+        return ((pos[0] & kVP9FrameTypeMask) == 0);
+    case VideoCodec::VP8:
+        // 0 - key frame; 1 - interframe;
+        return ((pos[0] & kVP8FrameTypeMask) == 0);
     }
+
+    return false;
 }
 
 }  // namespace
 
 // static
 std::unique_ptr<VideoDecoder> V4L2Decoder::Create(
-        const VideoCodec& codec, const size_t inputBufferSize, const size_t minNumOutputBuffers,
-        GetPoolCB getPoolCb, OutputCB outputCb, ErrorCB errorCb,
-        scoped_refptr<::base::SequencedTaskRunner> taskRunner) {
+        uint32_t debugStreamId, const VideoCodec& codec, const size_t inputBufferSize,
+        const size_t minNumOutputBuffers, GetPoolCB getPoolCb, OutputCB outputCb, ErrorCB errorCb,
+        scoped_refptr<::base::SequencedTaskRunner> taskRunner, bool isSecure) {
     std::unique_ptr<V4L2Decoder> decoder =
-            ::base::WrapUnique<V4L2Decoder>(new V4L2Decoder(taskRunner));
+            ::base::WrapUnique<V4L2Decoder>(new V4L2Decoder(debugStreamId, taskRunner));
     if (!decoder->start(codec, inputBufferSize, minNumOutputBuffers, std::move(getPoolCb),
-                        std::move(outputCb), std::move(errorCb))) {
+                        std::move(outputCb), std::move(errorCb), isSecure)) {
         return nullptr;
     }
     return decoder;
 }
 
-V4L2Decoder::V4L2Decoder(scoped_refptr<::base::SequencedTaskRunner> taskRunner)
-      : mTaskRunner(std::move(taskRunner)) {
+V4L2Decoder::V4L2Decoder(uint32_t debugStreamId,
+                         scoped_refptr<::base::SequencedTaskRunner> taskRunner)
+      : mDebugStreamId(debugStreamId), mTaskRunner(std::move(taskRunner)) {
     ALOGV("%s()", __func__);
 
     mWeakThis = mWeakThisFactory.GetWeakPtr();
@@ -91,11 +106,15 @@ V4L2Decoder::~V4L2Decoder() {
         mDevice->stopPolling();
         mDevice = nullptr;
     }
+    if (mInitialEosBuffer) {
+        mInitialEosBuffer = nullptr;
+    }
 }
 
 bool V4L2Decoder::start(const VideoCodec& codec, const size_t inputBufferSize,
                         const size_t minNumOutputBuffers, GetPoolCB getPoolCb, OutputCB outputCb,
-                        ErrorCB errorCb) {
+                        ErrorCB errorCb, bool isSecure) {
+    ATRACE_CALL();
     ALOGV("%s(codec=%s, inputBufferSize=%zu, minNumOutputBuffers=%zu)", __func__,
           VideoCodecToString(codec), inputBufferSize, minNumOutputBuffers);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
@@ -104,15 +123,17 @@ bool V4L2Decoder::start(const VideoCodec& codec, const size_t inputBufferSize,
     mGetPoolCb = std::move(getPoolCb);
     mOutputCb = std::move(outputCb);
     mErrorCb = std::move(errorCb);
+    mCodec = codec;
+    mIsSecure = isSecure;
 
     if (mState == State::Error) {
         ALOGE("Ignore due to error state.");
         return false;
     }
 
-    mDevice = V4L2Device::create();
+    mDevice = V4L2Device::create(mDebugStreamId);
 
-    const uint32_t inputPixelFormat = VideoCodecToV4L2PixFmt(codec);
+    const uint32_t inputPixelFormat = V4L2Device::videoCodecToPixFmt(codec);
     if (!mDevice->open(V4L2Device::Type::kDecoder, inputPixelFormat)) {
         ALOGE("Failed to open device for %s", VideoCodecToString(codec));
         return false;
@@ -123,10 +144,7 @@ bool V4L2Decoder::start(const VideoCodec& codec, const size_t inputBufferSize,
         return false;
     }
 
-    struct v4l2_decoder_cmd cmd;
-    memset(&cmd, 0, sizeof(cmd));
-    cmd.cmd = V4L2_DEC_CMD_STOP;
-    if (mDevice->ioctl(VIDIOC_TRY_DECODER_CMD, &cmd) != 0) {
+    if (!sendV4L2DecoderCmd(false)) {
         ALOGE("Device does not support flushing (V4L2_DEC_CMD_STOP)");
         return false;
     }
@@ -151,8 +169,13 @@ bool V4L2Decoder::start(const VideoCodec& codec, const size_t inputBufferSize,
         ALOGE("Failed to setup input format.");
         return false;
     }
+    if (!setupInitialOutput()) {
+        ALOGE("Unable to setup initial output");
+        return false;
+    }
 
-    if (!mDevice->startPolling(::base::BindRepeating(&V4L2Decoder::serviceDeviceTask, mWeakThis),
+    if (!mDevice->startPolling(mTaskRunner,
+                               ::base::BindRepeating(&V4L2Decoder::serviceDeviceTask, mWeakThis),
                                ::base::BindRepeating(&V4L2Decoder::onError, mWeakThis))) {
         ALOGE("Failed to start polling V4L2 device.");
         return false;
@@ -194,7 +217,135 @@ bool V4L2Decoder::setupInputFormat(const uint32_t inputPixelFormat, const size_t
     return true;
 }
 
+bool V4L2Decoder::setupInitialOutput() {
+    ATRACE_CALL();
+    ALOGV("%s()", __func__);
+    ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
+
+    if (!setupMinimalOutputFormat()) {
+        ALOGE("Failed to set minimal resolution for initial output buffers");
+        return false;
+    }
+
+    if (!startOutputQueue(1, V4L2_MEMORY_DMABUF)) {
+        ALOGE("Failed to start initialy output queue");
+        return false;
+    }
+
+    std::optional<V4L2WritableBufferRef> eosBuffer = mOutputQueue->getFreeBuffer();
+    if (!eosBuffer) {
+        ALOGE("Failed to acquire initial EOS buffer");
+        return false;
+    }
+
+    mInitialEosBuffer =
+            new GraphicBuffer(mCodedSize.getWidth(), mCodedSize.getHeight(),
+                              static_cast<PixelFormat>(HalPixelFormat::YCBCR_420_888),
+                              GraphicBuffer::USAGE_HW_VIDEO_ENCODER, "V4L2DecodeComponent");
+
+    if (mInitialEosBuffer->initCheck() != NO_ERROR) {
+        return false;
+    }
+
+    std::vector<int> fds;
+    for (size_t i = 0; i < mInitialEosBuffer->handle->numFds; i++) {
+        fds.push_back(mInitialEosBuffer->handle->data[i]);
+    }
+
+    if (!std::move(*eosBuffer).queueDMABuf(fds)) {
+        ALOGE("Failed to queue initial EOS buffer");
+        return false;
+    }
+
+    return true;
+}
+
+bool V4L2Decoder::setupMinimalOutputFormat() {
+    ui::Size minResolution, maxResolution;
+
+    for (const uint32_t& pixfmt :
+         mDevice->enumerateSupportedPixelformats(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) {
+        if (std::find(kSupportedOutputFourccs.begin(), kSupportedOutputFourccs.end(), pixfmt) ==
+            kSupportedOutputFourccs.end()) {
+            ALOGD("Pixel format %s is not supported, skipping...", fourccToString(pixfmt).c_str());
+            continue;
+        }
+
+        mDevice->getSupportedResolution(pixfmt, &minResolution, &maxResolution);
+        if (minResolution.isEmpty()) {
+            minResolution.set(128, 128);
+        }
+
+        if (mOutputQueue->setFormat(pixfmt, minResolution, 0) != std::nullopt) {
+            return true;
+        }
+    }
+
+    ALOGE("Failed to find supported pixel format");
+    return false;
+}
+
+bool V4L2Decoder::startOutputQueue(size_t minOutputBuffersCount, enum v4l2_memory memory) {
+    ALOGV("%s()", __func__);
+    ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
+
+    const std::optional<struct v4l2_format> format = getFormatInfo();
+    std::optional<size_t> numOutputBuffers = getNumOutputBuffers();
+    if (!format || !numOutputBuffers) {
+        return false;
+    }
+    *numOutputBuffers = std::max(*numOutputBuffers, minOutputBuffersCount);
+
+    const ui::Size codedSize(format->fmt.pix_mp.width, format->fmt.pix_mp.height);
+    if (!setupOutputFormat(codedSize)) {
+        return false;
+    }
+
+    const std::optional<struct v4l2_format> adjustedFormat = getFormatInfo();
+    if (!adjustedFormat) {
+        return false;
+    }
+    mCodedSize.set(adjustedFormat->fmt.pix_mp.width, adjustedFormat->fmt.pix_mp.height);
+    mVisibleRect = getVisibleRect(mCodedSize);
+
+    ALOGI("Need %zu output buffers. coded size: %s, visible rect: %s", *numOutputBuffers,
+          toString(mCodedSize).c_str(), toString(mVisibleRect).c_str());
+    if (isEmpty(mCodedSize)) {
+        ALOGE("Failed to get resolution from V4L2 driver.");
+        return false;
+    }
+
+    if (mOutputQueue->isStreaming()) {
+        mOutputQueue->streamoff();
+    }
+    if (mOutputQueue->allocatedBuffersCount() > 0) {
+        mOutputQueue->deallocateBuffers();
+    }
+
+    mFrameAtDevice.clear();
+    mBlockIdToV4L2Id.clear();
+    while (!mReuseFrameQueue.empty()) {
+        mReuseFrameQueue.pop();
+    }
+
+    const size_t adjustedNumOutputBuffers =
+            mOutputQueue->allocateBuffers(*numOutputBuffers, memory);
+    if (adjustedNumOutputBuffers == 0) {
+        ALOGE("Failed to allocate output buffer.");
+        return false;
+    }
+
+    ALOGV("Allocated %zu output buffers.", adjustedNumOutputBuffers);
+    if (!mOutputQueue->streamon()) {
+        ALOGE("Failed to streamon output queue.");
+        return false;
+    }
+
+    return true;
+}
+
 void V4L2Decoder::decode(std::unique_ptr<ConstBitstreamBuffer> buffer, DecodeCB decodeCb) {
+    ATRACE_CALL();
     ALOGV("%s(id=%d)", __func__, buffer->id);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
@@ -209,11 +360,18 @@ void V4L2Decoder::decode(std::unique_ptr<ConstBitstreamBuffer> buffer, DecodeCB
         setState(State::Decoding);
     }
 
+    // To determine if the DRC is pending, the access to the frame data is
+    // required. It's not possible to access the frame directly during secure
+    // playback, so this check must be skipped. b/279834186
+    if (!mIsSecure && mInitialEosBuffer && !mPendingDRC)
+        mPendingDRC = waitForDRC(buffer->dmabuf, mCodec);
+
     mDecodeRequests.push(DecodeRequest(std::move(buffer), std::move(decodeCb)));
     pumpDecodeRequest();
 }
 
 void V4L2Decoder::drain(DecodeCB drainCb) {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
@@ -239,6 +397,7 @@ void V4L2Decoder::drain(DecodeCB drainCb) {
 }
 
 void V4L2Decoder::pumpDecodeRequest() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
@@ -263,9 +422,33 @@ void V4L2Decoder::pumpDecodeRequest() {
                 return;
             }
 
+            // If output queue is not streaming, then device is unable to notify
+            // whenever draining is finished. (EOS frame cannot be dequeued).
+            // This is likely to happen in the event of that the first resolution
+            // change event wasn't dequeued before the drain request.
+            if (!mOutputQueue->isStreaming()) {
+                ALOGV("Wait for output queue to start streaming");
+                return;
+            }
+
             auto request = std::move(mDecodeRequests.front());
             mDecodeRequests.pop();
 
+            // There is one more case that EOS frame cannot be dequeued because
+            // the first resolution change event wasn't dequeued before - output
+            // queues on the host are not streaming but ARCVM has no knowledge about
+            // it. Check if first resolution change event was received and if there
+            // was no previously sent non-empty frame (other than SPS/PPS/EOS) that
+            // may trigger config from host side.
+            // Drain can only be finished if we are sure there was no stream = no
+            // single frame in the stack.
+            if (mInitialEosBuffer && !mPendingDRC) {
+                ALOGV("Terminate drain, because there was no stream");
+                mTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(std::move(request.decodeCb),
+                                                                  VideoDecoder::DecodeStatus::kOk));
+                return;
+            }
+
             if (!sendV4L2DecoderCmd(false)) {
                 std::move(request.decodeCb).Run(VideoDecoder::DecodeStatus::kError);
                 onError();
@@ -276,13 +459,45 @@ void V4L2Decoder::pumpDecodeRequest() {
             return;
         }
 
+        auto dma_buf_id = getDmabufId(mDecodeRequests.front().buffer->dmabuf.handle()->data[0]);
+        if (!dma_buf_id) {
+            ALOGE("Failed to get dmabuf id");
+            onError();
+            return;
+        }
+
+        std::optional<V4L2WritableBufferRef> inputBuffer;
+        size_t targetIndex = 0;
+
+        // If there's an existing input buffer for this dma buffer, use it.
+        for (; targetIndex < mNextInputBufferId; targetIndex++) {
+            if (mLastDmaBufferId[targetIndex] == dma_buf_id) {
+                break;
+            }
+        }
+
+        if (targetIndex < kNumInputBuffers) {
+            // If we didn't find a buffer and there is an unused buffer, use that one.
+            if (targetIndex == mNextInputBufferId) {
+                mNextInputBufferId++;
+            }
+
+            inputBuffer = mInputQueue->getFreeBuffer(targetIndex);
+        }
+
+        // If we didn't find a reusable/unused input buffer, clobber a free one.
+        if (!inputBuffer) {
+            inputBuffer = mInputQueue->getFreeBuffer();
+        }
+
         // Pause if no free input buffer. We resume decoding after dequeueing input buffers.
-        auto inputBuffer = mInputQueue->getFreeBuffer();
         if (!inputBuffer) {
             ALOGV("There is no free input buffer.");
             return;
         }
 
+        mLastDmaBufferId[inputBuffer->bufferId()] = *dma_buf_id;
+
         auto request = std::move(mDecodeRequests.front());
         mDecodeRequests.pop();
 
@@ -314,6 +529,7 @@ void V4L2Decoder::pumpDecodeRequest() {
 }
 
 void V4L2Decoder::flush() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
@@ -339,7 +555,22 @@ void V4L2Decoder::flush() {
     const bool isOutputStreaming = mOutputQueue->isStreaming();
     mDevice->stopPolling();
     mOutputQueue->streamoff();
+
+    // Extract currently enqueued output picture buffers to be queued later first.
+    // See b/270003218 and b/297228544.
+    for (auto& [v4l2Id, frame] : mFrameAtDevice) {
+        // Find corresponding mapping block ID (DMABUF ID) to V4L2 buffer ID.
+        // The buffer was enqueued to the device, therefore such a mapping has to exist.
+        auto blockIdIter =
+                std::find_if(mBlockIdToV4L2Id.begin(), mBlockIdToV4L2Id.end(),
+                             [v4l2Id = v4l2Id](const auto& el) { return el.second == v4l2Id; });
+
+        ALOG_ASSERT(blockIdIter != mBlockIdToV4L2Id.end());
+        size_t blockId = blockIdIter->first;
+        mReuseFrameQueue.push(std::make_pair(blockId, std::move(frame)));
+    }
     mFrameAtDevice.clear();
+
     mInputQueue->streamoff();
 
     // Streamon both V4L2 queues.
@@ -355,7 +586,8 @@ void V4L2Decoder::flush() {
         tryFetchVideoFrame();
     }
 
-    if (!mDevice->startPolling(::base::BindRepeating(&V4L2Decoder::serviceDeviceTask, mWeakThis),
+    if (!mDevice->startPolling(mTaskRunner,
+                               ::base::BindRepeating(&V4L2Decoder::serviceDeviceTask, mWeakThis),
                                ::base::BindRepeating(&V4L2Decoder::onError, mWeakThis))) {
         ALOGE("Failed to start polling V4L2 device.");
         onError();
@@ -366,6 +598,7 @@ void V4L2Decoder::flush() {
 }
 
 void V4L2Decoder::serviceDeviceTask(bool event) {
+    ATRACE_CALL();
     ALOGV("%s(event=%d) state=%s InputQueue(%s):%zu+%zu/%zu, OutputQueue(%s):%zu+%zu/%zu", __func__,
           event, StateToString(mState), (mInputQueue->isStreaming() ? "streamon" : "streamoff"),
           mInputQueue->freeBuffersCount(), mInputQueue->queuedBuffersCount(),
@@ -373,6 +606,7 @@ void V4L2Decoder::serviceDeviceTask(bool event) {
           (mOutputQueue->isStreaming() ? "streamon" : "streamoff"),
           mOutputQueue->freeBuffersCount(), mOutputQueue->queuedBuffersCount(),
           mOutputQueue->allocatedBuffersCount());
+
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
     if (mState == State::Error) return;
@@ -482,6 +716,7 @@ void V4L2Decoder::serviceDeviceTask(bool event) {
 }
 
 bool V4L2Decoder::dequeueResolutionChangeEvent() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
@@ -497,58 +732,33 @@ bool V4L2Decoder::dequeueResolutionChangeEvent() {
 }
 
 bool V4L2Decoder::changeResolution() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
-    const std::optional<struct v4l2_format> format = getFormatInfo();
-    std::optional<size_t> numOutputBuffers = getNumOutputBuffers();
-    if (!format || !numOutputBuffers) {
-        return false;
-    }
-    *numOutputBuffers = std::max(*numOutputBuffers, mMinNumOutputBuffers);
-
-    const ui::Size codedSize(format->fmt.pix_mp.width, format->fmt.pix_mp.height);
-    if (!setupOutputFormat(codedSize)) {
-        return false;
-    }
-
-    const std::optional<struct v4l2_format> adjustedFormat = getFormatInfo();
-    if (!adjustedFormat) {
-        return false;
+    if (mInitialEosBuffer) {
+        mInitialEosBuffer = nullptr;
     }
-    mCodedSize.set(adjustedFormat->fmt.pix_mp.width, adjustedFormat->fmt.pix_mp.height);
-    mVisibleRect = getVisibleRect(mCodedSize);
 
-    ALOGI("Need %zu output buffers. coded size: %s, visible rect: %s", *numOutputBuffers,
-          toString(mCodedSize).c_str(), toString(mVisibleRect).c_str());
-    if (isEmpty(mCodedSize)) {
-        ALOGE("Failed to get resolution from V4L2 driver.");
+    if (!startOutputQueue(mMinNumOutputBuffers, V4L2_MEMORY_DMABUF)) {
+        ALOGE("Failed to start output queue during DRC.");
         return false;
     }
 
-    mOutputQueue->streamoff();
-    mOutputQueue->deallocateBuffers();
-    mFrameAtDevice.clear();
-    mBlockIdToV4L2Id.clear();
-
-    const size_t adjustedNumOutputBuffers =
-            mOutputQueue->allocateBuffers(*numOutputBuffers, V4L2_MEMORY_DMABUF);
-    if (adjustedNumOutputBuffers == 0) {
-        ALOGE("Failed to allocate output buffer.");
-        return false;
-    }
-    ALOGV("Allocated %zu output buffers.", adjustedNumOutputBuffers);
-    if (!mOutputQueue->streamon()) {
-        ALOGE("Failed to streamon output queue.");
-        return false;
+    // If drain request is pending then it means that previous call to pumpDecodeRequest
+    // stalled the request, because there was no way of notifying the component that
+    // drain has finished. Send the drain request to the device now.
+    if (!mDecodeRequests.empty() && mDecodeRequests.front().buffer == nullptr) {
+        mTaskRunner->PostTask(FROM_HERE,
+                              ::base::BindOnce(&V4L2Decoder::pumpDecodeRequest, mWeakThis));
     }
 
     // Release the previous VideoFramePool before getting a new one to guarantee only one pool
     // exists at the same time.
     mVideoFramePool.reset();
     // Always use flexible pixel 420 format YCBCR_420_888 in Android.
-    mVideoFramePool =
-            mGetPoolCb.Run(mCodedSize, HalPixelFormat::YCBCR_420_888, adjustedNumOutputBuffers);
+    mVideoFramePool = mGetPoolCb.Run(mCodedSize, HalPixelFormat::YCBCR_420_888,
+                                     mOutputQueue->allocatedBuffersCount());
     if (!mVideoFramePool) {
         ALOGE("Failed to get block pool with size: %s", toString(mCodedSize).c_str());
         return false;
@@ -577,6 +787,7 @@ bool V4L2Decoder::setupOutputFormat(const ui::Size& size) {
 }
 
 void V4L2Decoder::tryFetchVideoFrame() {
+    ATRACE_CALL();
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
@@ -591,10 +802,26 @@ void V4L2Decoder::tryFetchVideoFrame() {
         return;
     }
 
-    if (!mVideoFramePool->getVideoFrame(
-                ::base::BindOnce(&V4L2Decoder::onVideoFrameReady, mWeakThis))) {
-        ALOGV("%s(): Previous callback is running, ignore.", __func__);
+    if (mReuseFrameQueue.empty()) {
+        if (!mVideoFramePool->getVideoFrame(
+                    ::base::BindOnce(&V4L2Decoder::onVideoFrameReady, mWeakThis))) {
+            ALOGV("%s(): Previous callback is running, ignore.", __func__);
+        }
+
+        return;
     }
+
+    // Reuse output picture buffers that were abandoned after STREAMOFF first.
+    // NOTE(b/270003218 and b/297228544): This avoids issues with lack of
+    // ability to return all picture buffers on STREAMOFF from VDA and
+    // saves on IPC with BufferQueue increasing overall responsiveness.
+    uint32_t blockId = mReuseFrameQueue.front().first;
+    std::unique_ptr<VideoFrame> frame = std::move(mReuseFrameQueue.front().second);
+    mReuseFrameQueue.pop();
+
+    // Avoid recursive calls
+    mTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&V4L2Decoder::onVideoFrameReady, mWeakThis,
+                                                      std::make_pair(std::move(frame), blockId)));
 }
 
 void V4L2Decoder::onVideoFrameReady(
@@ -619,12 +846,24 @@ void V4L2Decoder::onVideoFrameReady(
     if (iter != mBlockIdToV4L2Id.end()) {
         // If we have met this block in the past, reuse the same V4L2 buffer.
         outputBuffer = mOutputQueue->getFreeBuffer(iter->second);
+        if (!outputBuffer) {
+            // NOTE(b/281477122): There is a bug in C2BufferQueueBlock. Its buffer queue slots
+            // cache is inconsistent when MediaSync is used and a buffer with the same dmabuf id
+            // can be returned twice despite being already in use by V4L2Decoder. We drop the
+            // buffer here in order to prevent unwanted errors. It is safe, because its allocation
+            // will be kept alive by the C2GraphicBlock instance.
+            ALOGW("%s(): The frame have been supplied again, despite being already enqueued",
+                  __func__);
+            tryFetchVideoFrame();
+            return;
+        }
     } else if (mBlockIdToV4L2Id.size() < mOutputQueue->allocatedBuffersCount()) {
         // If this is the first time we see this block, give it the next
         // available V4L2 buffer.
         const size_t v4l2BufferId = mBlockIdToV4L2Id.size();
         mBlockIdToV4L2Id.emplace(blockId, v4l2BufferId);
         outputBuffer = mOutputQueue->getFreeBuffer(v4l2BufferId);
+        ALOG_ASSERT(v4l2BufferId == outputBuffer->bufferId());
     } else {
         // If this happens, this is a bug in VideoFramePool. It should never
         // provide more blocks than we have V4L2 buffers.
diff --git a/common/V4L2Device.cpp b/v4l2/V4L2Device.cpp
similarity index 75%
rename from common/V4L2Device.cpp
rename to v4l2/V4L2Device.cpp
index 4e44fed6ce34b167517c406535298df2dd7a1a0a..0190deba4e50f843a5db44d2e2d8038c3e915607 100644
--- a/common/V4L2Device.cpp
+++ b/v4l2/V4L2Device.cpp
@@ -6,9 +6,11 @@
 //       versions (e.g. V4L2_PIX_FMT_VP8_FRAME)
 
 //#define LOG_NDEBUG 0
+#define ATRACE_TAG ATRACE_TAG_VIDEO
 #define LOG_TAG "V4L2Device"
 
-#include <v4l2_codec2/common/V4L2Device.h>
+#include <linux/v4l2-controls.h>
+#include <v4l2_codec2/v4l2/V4L2Device.h>
 
 #include <fcntl.h>
 #include <inttypes.h>
@@ -19,6 +21,7 @@
 #include <sys/eventfd.h>
 #include <sys/ioctl.h>
 #include <sys/mman.h>
+#include <utils/Trace.h>
 
 #include <algorithm>
 #include <mutex>
@@ -35,28 +38,32 @@
 #include <v4l2_codec2/common/Fourcc.h>
 #include <v4l2_codec2/common/VideoPixelFormat.h>
 
-// VP8 parsed frames
-#ifndef V4L2_PIX_FMT_VP8_FRAME
-#define V4L2_PIX_FMT_VP8_FRAME v4l2_fourcc('V', 'P', '8', 'F')
-#endif
-
-// VP9 parsed frames
-#ifndef V4L2_PIX_FMT_VP9_FRAME
-#define V4L2_PIX_FMT_VP9_FRAME v4l2_fourcc('V', 'P', '9', 'F')
-#endif
-
-// H264 parsed slices
-#ifndef V4L2_PIX_FMT_H264_SLICE
-#define V4L2_PIX_FMT_H264_SLICE v4l2_fourcc('S', '2', '6', '4')
-#endif
-
-// HEVC parsed slices
-#ifndef V4L2_PIX_FMT_HEVC_SLICE
-#define V4L2_PIX_FMT_HEVC_SLICE v4l2_fourcc('S', '2', '6', '5')
-#endif
-
 namespace android {
 
+bool isValidPixFmtForCodec(VideoCodec codec, uint32_t pixFmt) {
+    switch (pixFmt) {
+    case V4L2_PIX_FMT_H264:
+    case V4L2_PIX_FMT_H264_SLICE:
+        return codec == VideoCodec::H264;
+        break;
+    case V4L2_PIX_FMT_VP8:
+    case V4L2_PIX_FMT_VP8_FRAME:
+        return codec == VideoCodec::VP8;
+        break;
+    case V4L2_PIX_FMT_VP9:
+    case V4L2_PIX_FMT_VP9_FRAME:
+        return codec == VideoCodec::VP9;
+        break;
+    case V4L2_PIX_FMT_HEVC:
+    case V4L2_PIX_FMT_HEVC_SLICE:
+        return codec == VideoCodec::HEVC;
+        break;
+    default:
+        ALOGE("Unhandled pixelformat %s", fourccToString(pixFmt).c_str());
+        return false;
+    }
+}
+
 struct v4l2_format buildV4L2Format(const enum v4l2_buf_type type, uint32_t fourcc,
                                    const ui::Size& size, size_t buffer_size, uint32_t stride) {
     struct v4l2_format format;
@@ -727,6 +734,7 @@ V4L2Queue::~V4L2Queue() {
 
 std::optional<struct v4l2_format> V4L2Queue::setFormat(uint32_t fourcc, const ui::Size& size,
                                                        size_t bufferSize, uint32_t stride) {
+    ATRACE_CALL();
     struct v4l2_format format = buildV4L2Format(mType, fourcc, size, bufferSize, stride);
     if (mDevice->ioctl(VIDIOC_S_FMT, &format) != 0 || format.fmt.pix_mp.pixelformat != fourcc) {
         ALOGEQ("Failed to set format (format_fourcc=0x%" PRIx32 ")", fourcc);
@@ -761,6 +769,7 @@ std::pair<std::optional<struct v4l2_format>, int> V4L2Queue::getFormat() {
 }
 
 size_t V4L2Queue::allocateBuffers(size_t count, enum v4l2_memory memory) {
+    ATRACE_CALL();
     DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
     ALOG_ASSERT(!mFreeBuffers);
     ALOG_ASSERT(mQueuedBuffers.size() == 0u);
@@ -827,6 +836,7 @@ size_t V4L2Queue::allocateBuffers(size_t count, enum v4l2_memory memory) {
     ALOG_ASSERT(mFreeBuffers);
     ALOG_ASSERT(mFreeBuffers->size() == mBuffers.size());
     ALOG_ASSERT(mQueuedBuffers.size() == 0u);
+    reportTraceMetrics();
 
     return mBuffers.size();
 }
@@ -860,6 +870,7 @@ bool V4L2Queue::deallocateBuffers() {
 
     ALOG_ASSERT(!mFreeBuffers);
     ALOG_ASSERT(mQueuedBuffers.size() == 0u);
+    reportTraceMetrics();
 
     return true;
 }
@@ -903,6 +914,29 @@ std::optional<V4L2WritableBufferRef> V4L2Queue::getFreeBuffer(size_t requestedBu
                                                    mWeakThisFactory.GetWeakPtr());
 }
 
+void V4L2Queue::reportTraceMetrics() {
+    // Don't printf labels if ATrace is not enabled
+    if (!ATRACE_ENABLED()) return;
+
+    std::string atraceLabel;
+
+    atraceLabel =
+            V4L2Device::v4L2BufferTypeToATraceLabel(mDevice->getDebugStreamId(), mType, "streamon");
+    ATRACE_INT(atraceLabel.c_str(), isStreaming());
+
+    atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(mDevice->getDebugStreamId(), mType,
+                                                          "buffers free");
+    ATRACE_INT64(atraceLabel.c_str(), freeBuffersCount());
+
+    atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(mDevice->getDebugStreamId(), mType,
+                                                          "buffers queued");
+    ATRACE_INT64(atraceLabel.c_str(), queuedBuffersCount());
+
+    atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(mDevice->getDebugStreamId(), mType,
+                                                          "buffers allocated");
+    ATRACE_INT64(atraceLabel.c_str(), allocatedBuffersCount());
+}
+
 bool V4L2Queue::queueBuffer(struct v4l2_buffer* v4l2Buffer) {
     DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
 
@@ -912,6 +946,12 @@ bool V4L2Queue::queueBuffer(struct v4l2_buffer* v4l2Buffer) {
         return false;
     }
 
+    if (ATRACE_ENABLED()) {
+        std::string atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(
+                mDevice->getDebugStreamId(), mType, "enqueued buffer");
+        ATRACE_ASYNC_BEGIN(atraceLabel.c_str(), v4l2Buffer->index);
+    }
+
     auto inserted = mQueuedBuffers.emplace(v4l2Buffer->index);
     if (!inserted.second) {
         ALOGE("Queuing buffer failed");
@@ -920,6 +960,8 @@ bool V4L2Queue::queueBuffer(struct v4l2_buffer* v4l2Buffer) {
 
     mDevice->schedulePoll();
 
+    reportTraceMetrics();
+
     return true;
 }
 
@@ -961,12 +1003,20 @@ std::pair<bool, V4L2ReadableBufferRef> V4L2Queue::dequeueBuffer() {
         }
     }
 
+    if (ATRACE_ENABLED()) {
+        std::string atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(
+                mDevice->getDebugStreamId(), mType, "enqueued buffer");
+        ATRACE_ASYNC_END(atraceLabel.c_str(), v4l2Buffer.index);
+    }
+
     auto it = mQueuedBuffers.find(v4l2Buffer.index);
     ALOG_ASSERT(it != mQueuedBuffers.end());
     mQueuedBuffers.erase(*it);
 
     if (queuedBuffersCount() > 0) mDevice->schedulePoll();
 
+    reportTraceMetrics();
+
     ALOG_ASSERT(mFreeBuffers);
     return std::make_pair(true, V4L2BufferRefFactory::CreateReadableRef(
                                         v4l2Buffer, mWeakThisFactory.GetWeakPtr()));
@@ -991,6 +1041,7 @@ bool V4L2Queue::streamon() {
     }
 
     mIsStreaming = true;
+    reportTraceMetrics();
 
     return true;
 }
@@ -1018,6 +1069,8 @@ bool V4L2Queue::streamoff() {
 
     mIsStreaming = false;
 
+    reportTraceMetrics();
+
     return true;
 }
 
@@ -1053,7 +1106,7 @@ public:
     }
 };
 
-V4L2Device::V4L2Device() {
+V4L2Device::V4L2Device(uint32_t debugStreamId) : mDebugStreamId(debugStreamId) {
     DETACH_FROM_SEQUENCE(mClientSequenceChecker);
 }
 
@@ -1095,9 +1148,9 @@ void V4L2Device::onQueueDestroyed(v4l2_buf_type bufType) {
 }
 
 // static
-scoped_refptr<V4L2Device> V4L2Device::create() {
+scoped_refptr<V4L2Device> V4L2Device::create(uint32_t debugStreamId) {
     ALOGV("%s()", __func__);
-    return scoped_refptr<V4L2Device>(new V4L2Device());
+    return scoped_refptr<V4L2Device>(new V4L2Device(debugStreamId));
 }
 
 bool V4L2Device::open(Type type, uint32_t v4l2PixFmt) {
@@ -1130,7 +1183,7 @@ int V4L2Device::ioctl(int request, void* arg) {
     return HANDLE_EINTR(::ioctl(mDeviceFd.get(), request, arg));
 }
 
-bool V4L2Device::poll(bool pollDevice, bool* eventPending) {
+bool V4L2Device::poll(bool pollDevice, bool pollBuffers, bool* eventPending, bool* buffersPending) {
     struct pollfd pollfds[2];
     nfds_t nfds;
     int pollfd = -1;
@@ -1142,7 +1195,11 @@ bool V4L2Device::poll(bool pollDevice, bool* eventPending) {
     if (pollDevice) {
         ALOGV("adding device fd to poll() set");
         pollfds[nfds].fd = mDeviceFd.get();
-        pollfds[nfds].events = POLLIN | POLLOUT | POLLERR | POLLPRI;
+        pollfds[nfds].events = POLLERR | POLLPRI;
+        if (pollBuffers) {
+            ALOGV("will poll buffers");
+            pollfds[nfds].events |= POLLIN | POLLOUT;
+        }
         pollfd = nfds;
         nfds++;
     }
@@ -1152,6 +1209,7 @@ bool V4L2Device::poll(bool pollDevice, bool* eventPending) {
         return false;
     }
     *eventPending = (pollfd != -1 && pollfds[pollfd].revents & POLLPRI);
+    *buffersPending = (pollfd != -1 && pollfds[pollfd].revents & (POLLIN | POLLOUT));
     return true;
 }
 
@@ -1222,7 +1280,7 @@ std::vector<uint32_t> V4L2Device::preferredInputFormat(Type type) {
 }
 
 // static
-uint32_t V4L2Device::C2ProfileToV4L2PixFmt(C2Config::profile_t profile, bool sliceBased) {
+uint32_t V4L2Device::c2ProfileToV4L2PixFmt(C2Config::profile_t profile, bool sliceBased) {
     if (profile >= C2Config::PROFILE_AVC_BASELINE &&
         profile <= C2Config::PROFILE_AVC_ENHANCED_MULTIVIEW_DEPTH_HIGH) {
         if (sliceBased) {
@@ -1255,14 +1313,160 @@ uint32_t V4L2Device::C2ProfileToV4L2PixFmt(C2Config::profile_t profile, bool sli
     }
 }
 
+// static
+C2Config::level_t V4L2Device::v4L2LevelToC2Level(VideoCodec codec, uint32_t level) {
+    switch (codec) {
+    case VideoCodec::H264:
+        switch (level) {
+        case V4L2_MPEG_VIDEO_H264_LEVEL_1_0:
+            return C2Config::LEVEL_AVC_1;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_1B:
+            return C2Config::LEVEL_AVC_1B;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_1_1:
+            return C2Config::LEVEL_AVC_1_1;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_1_2:
+            return C2Config::LEVEL_AVC_1_2;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_1_3:
+            return C2Config::LEVEL_AVC_1_3;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_2_0:
+            return C2Config::LEVEL_AVC_2;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_2_1:
+            return C2Config::LEVEL_AVC_2_1;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_2_2:
+            return C2Config::LEVEL_AVC_2_2;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_3_0:
+            return C2Config::LEVEL_AVC_3;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_3_1:
+            return C2Config::LEVEL_AVC_3_1;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_3_2:
+            return C2Config::LEVEL_AVC_3_2;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_4_0:
+            return C2Config::LEVEL_AVC_4;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_4_1:
+            return C2Config::LEVEL_AVC_4_1;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_4_2:
+            return C2Config::LEVEL_AVC_4_2;
+        case V4L2_MPEG_VIDEO_H264_LEVEL_5_0:
+            return C2Config::LEVEL_AVC_5;
+#ifdef V4L2_MPEG_VIDEO_H264_LEVEL_5_1
+        case V4L2_MPEG_VIDEO_H264_LEVEL_5_1:
+            return C2Config::LEVEL_AVC_5_1;
+#endif
+#ifdef V4L2_MPEG_VIDEO_H264_LEVEL_5_2
+        case V4L2_MPEG_VIDEO_H264_LEVEL_5_2:
+            return C2Config::LEVEL_AVC_5_2;
+#endif
+#ifdef V4L2_MPEG_VIDEO_H264_LEVEL_6_0
+        case V4L2_MPEG_VIDEO_H264_LEVEL_6_0:
+            return C2Config::LEVEL_AVC_6;
+#endif
+#ifdef V4L2_MPEG_VIDEO_H264_LEVEL_6_1
+        case V4L2_MPEG_VIDEO_H264_LEVEL_6_1:
+            return C2Config::LEVEL_AVC_6_1;
+#endif
+#ifdef V4L2_MPEG_VIDEO_H264_LEVEL_6_2
+        case V4L2_MPEG_VIDEO_H264_LEVEL_6_2:
+            return C2Config::LEVEL_AVC_6_2;
+#endif
+        }
+        break;
+    case VideoCodec::VP8:
+        return C2Config::LEVEL_UNUSED;
+        break;
+    case VideoCodec::VP9:
+        switch (level) {
+#ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_1_0
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_1_0:
+            return C2Config::LEVEL_VP9_1;
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_1_1:
+            return C2Config::LEVEL_VP9_1_1;
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_2_0:
+            return C2Config::LEVEL_VP9_2;
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_2_1:
+            return C2Config::LEVEL_VP9_2_1;
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_3_0:
+            return C2Config::LEVEL_VP9_3;
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_3_1:
+            return C2Config::LEVEL_VP9_3_1;
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_4_0:
+            return C2Config::LEVEL_VP9_4;
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_4_1:
+            return C2Config::LEVEL_VP9_4_1;
+#ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_5_0
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_5_0:
+            return C2Config::LEVEL_VP9_5;
+#endif
+#ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_5_1
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_5_1:
+            return C2Config::LEVEL_VP9_5_1;
+#endif
+#ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_5_2
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_5_2:
+            return C2Config::LEVEL_VP9_5_2;
+#endif
+#ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_6_0
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_6_0:
+            return C2Config::LEVEL_VP9_6;
+#endif
+#ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_6_1
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_6_1:
+            return C2Config::LEVEL_VP9_6_1;
+#endif
+#ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_6_2
+        case V4L2_MPEG_VIDEO_VP9_LEVEL_6_2:
+            return C2Config::LEVEL_VP9_6_2;
+#endif
+#endif
+        default:
+            return C2Config::LEVEL_UNUSED;
+        }
+        break;
+    case VideoCodec::HEVC:
+        switch (level) {
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_1:
+            return C2Config::LEVEL_HEVC_MAIN_1;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_2:
+            return C2Config::LEVEL_HEVC_MAIN_2;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_2_1:
+            return C2Config::LEVEL_HEVC_MAIN_2_1;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_3:
+            return C2Config::LEVEL_HEVC_MAIN_3;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_3_1:
+            return C2Config::LEVEL_HEVC_MAIN_3_1;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_4:
+            return C2Config::LEVEL_HEVC_MAIN_4;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_4_1:
+            return C2Config::LEVEL_HEVC_MAIN_4_1;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_5:
+            return C2Config::LEVEL_HEVC_MAIN_5;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_5_1:
+            return C2Config::LEVEL_HEVC_MAIN_5_1;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_5_2:
+            return C2Config::LEVEL_HEVC_MAIN_5_2;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_6:
+            return C2Config::LEVEL_HEVC_MAIN_6;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_6_1:
+            return C2Config::LEVEL_HEVC_MAIN_6_1;
+        case V4L2_MPEG_VIDEO_HEVC_LEVEL_6_2:
+            return C2Config::LEVEL_HEVC_MAIN_6_2;
+        }
+        break;
+    default:
+        ALOGE("Unknown codec: %u", codec);
+    }
+    ALOGE("Unknown level: %u", level);
+    return C2Config::LEVEL_UNUSED;
+}
+
 // static
 C2Config::profile_t V4L2Device::v4L2ProfileToC2Profile(VideoCodec codec, uint32_t profile) {
     switch (codec) {
     case VideoCodec::H264:
         switch (profile) {
         case V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE:
-        case V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE:
             return C2Config::PROFILE_AVC_BASELINE;
+        case V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE:
+            return C2Config::PROFILE_AVC_CONSTRAINED_BASELINE;
         case V4L2_MPEG_VIDEO_H264_PROFILE_MAIN:
             return C2Config::PROFILE_AVC_MAIN;
         case V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED:
@@ -1312,8 +1516,112 @@ C2Config::profile_t V4L2Device::v4L2ProfileToC2Profile(VideoCodec codec, uint32_
     return C2Config::PROFILE_UNUSED;
 }
 
-std::vector<C2Config::profile_t> V4L2Device::v4L2PixFmtToC2Profiles(uint32_t pixFmt,
-                                                                    bool /*isEncoder*/) {
+// static
+uint32_t V4L2Device::videoCodecToPixFmt(VideoCodec codec) {
+    switch (codec) {
+    case VideoCodec::H264:
+        return V4L2_PIX_FMT_H264;
+    case VideoCodec::VP8:
+        return V4L2_PIX_FMT_VP8;
+    case VideoCodec::VP9:
+        return V4L2_PIX_FMT_VP9;
+    case VideoCodec::HEVC:
+        return V4L2_PIX_FMT_HEVC;
+    }
+}
+
+std::vector<C2Config::level_t> V4L2Device::queryC2Levels(uint32_t pixFmt) {
+    auto getSupportedLevels = [this](VideoCodec codec, std::vector<C2Config::level_t>* levels) {
+        uint32_t queryId = 0;
+        switch (codec) {
+        case VideoCodec::H264:
+            queryId = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
+            break;
+#ifdef V4L2_CID_MPEG_VIDEO_VP9_LEVEL
+        case VideoCodec::VP9:
+            queryId = V4L2_CID_MPEG_VIDEO_VP9_LEVEL;
+            break;
+#endif
+        case VideoCodec::HEVC:
+            queryId = V4L2_CID_MPEG_VIDEO_HEVC_LEVEL;
+            break;
+        default:
+            return false;
+        }
+
+        v4l2_queryctrl queryCtrl = {};
+        queryCtrl.id = queryId;
+        if (ioctl(VIDIOC_QUERYCTRL, &queryCtrl) != 0) {
+            return false;
+        }
+        v4l2_querymenu queryMenu = {};
+        queryMenu.id = queryCtrl.id;
+        for (queryMenu.index = queryCtrl.minimum;
+             static_cast<int>(queryMenu.index) <= queryCtrl.maximum; queryMenu.index++) {
+            if (ioctl(VIDIOC_QUERYMENU, &queryMenu) == 0) {
+                const C2Config::level_t level =
+                        V4L2Device::v4L2LevelToC2Level(codec, queryMenu.index);
+                if (level != C2Config::LEVEL_UNUSED) levels->push_back(level);
+            }
+        }
+        return true;
+    };
+
+    std::vector<C2Config::level_t> levels;
+    switch (pixFmt) {
+    case V4L2_PIX_FMT_H264:
+    case V4L2_PIX_FMT_H264_SLICE:
+        if (!getSupportedLevels(VideoCodec::H264, &levels)) {
+            ALOGW("Driver doesn't support QUERY H264 levels, "
+                  "use default values, 1-5_2");
+            levels = {C2Config::LEVEL_AVC_1,   C2Config::LEVEL_AVC_1B,  C2Config::LEVEL_AVC_1_1,
+                      C2Config::LEVEL_AVC_1_2, C2Config::LEVEL_AVC_1_3, C2Config::LEVEL_AVC_2,
+                      C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2, C2Config::LEVEL_AVC_3,
+                      C2Config::LEVEL_AVC_3_1, C2Config::LEVEL_AVC_3_2, C2Config::LEVEL_AVC_4,
+                      C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2, C2Config::LEVEL_AVC_5,
+                      C2Config::LEVEL_AVC_5_1, C2Config::LEVEL_AVC_5_2};
+        }
+        break;
+    case V4L2_PIX_FMT_VP8:
+    case V4L2_PIX_FMT_VP8_FRAME:
+        if (!getSupportedLevels(VideoCodec::VP8, &levels)) {
+            ALOGW("Driver doesn't support QUERY VP8 levels, use default values, unused");
+            levels = {C2Config::LEVEL_UNUSED};
+        }
+        break;
+    case V4L2_PIX_FMT_VP9:
+    case V4L2_PIX_FMT_VP9_FRAME:
+        if (!getSupportedLevels(VideoCodec::VP9, &levels)) {
+            ALOGW("Driver doesn't support QUERY VP9 levels, use default values, 1-5");
+            levels = {C2Config::LEVEL_VP9_1,   C2Config::LEVEL_VP9_1_1, C2Config::LEVEL_VP9_2,
+                      C2Config::LEVEL_VP9_2_1, C2Config::LEVEL_VP9_3,   C2Config::LEVEL_VP9_3_1,
+                      C2Config::LEVEL_VP9_4,   C2Config::LEVEL_VP9_4_1, C2Config::LEVEL_VP9_5};
+        }
+        break;
+    case V4L2_PIX_FMT_HEVC:
+    case V4L2_PIX_FMT_HEVC_SLICE:
+        if (!getSupportedLevels(VideoCodec::HEVC, &levels)) {
+            ALOGW("Driver doesn't support QUERY HEVC levels, use default values");
+            levels = {C2Config::LEVEL_HEVC_MAIN_1,   C2Config::LEVEL_HEVC_MAIN_2,
+                      C2Config::LEVEL_HEVC_MAIN_2_1, C2Config::LEVEL_HEVC_MAIN_3,
+                      C2Config::LEVEL_HEVC_MAIN_3_1, C2Config::LEVEL_HEVC_MAIN_4,
+                      C2Config::LEVEL_HEVC_MAIN_4_1, C2Config::LEVEL_HEVC_MAIN_5,
+                      C2Config::LEVEL_HEVC_MAIN_5_1, C2Config::LEVEL_HEVC_MAIN_5_2,
+                      C2Config::LEVEL_HEVC_MAIN_6,   C2Config::LEVEL_HEVC_MAIN_6_1,
+                      C2Config::LEVEL_HEVC_MAIN_6_2};
+        }
+        break;
+    default:
+        ALOGE("Unhandled pixelformat %s", fourccToString(pixFmt).c_str());
+        return {};
+    }
+
+    std::sort(levels.begin(), levels.end());
+    levels.erase(std::unique(levels.begin(), levels.end()), levels.end());
+    return levels;
+}
+
+std::vector<C2Config::profile_t> V4L2Device::queryC2Profiles(uint32_t pixFmt) {
     auto getSupportedProfiles = [this](VideoCodec codec,
                                        std::vector<C2Config::profile_t>* profiles) {
         uint32_t queryId = 0;
@@ -1361,6 +1669,7 @@ std::vector<C2Config::profile_t> V4L2Device::v4L2PixFmtToC2Profiles(uint32_t pix
                   "use default values, Base, Main, High");
             profiles = {
                     C2Config::PROFILE_AVC_BASELINE,
+                    C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
                     C2Config::PROFILE_AVC_MAIN,
                     C2Config::PROFILE_AVC_HIGH,
             };
@@ -1406,6 +1715,8 @@ int32_t V4L2Device::c2ProfileToV4L2H264Profile(C2Config::profile_t profile) {
     switch (profile) {
     case C2Config::PROFILE_AVC_BASELINE:
         return V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE;
+    case C2Config::PROFILE_AVC_CONSTRAINED_BASELINE:
+        return V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE;
     case C2Config::PROFILE_AVC_MAIN:
         return V4L2_MPEG_VIDEO_H264_PROFILE_MAIN;
     case C2Config::PROFILE_AVC_EXTENDED:
@@ -1474,7 +1785,7 @@ int32_t V4L2Device::h264LevelIdcToV4L2H264Level(uint8_t levelIdc) {
 }
 
 // static
-v4l2_mpeg_video_bitrate_mode V4L2Device::C2BitrateModeToV4L2BitrateMode(
+v4l2_mpeg_video_bitrate_mode V4L2Device::c2BitrateModeToV4L2BitrateMode(
         C2Config::bitrate_mode_t bitrateMode) {
     switch (bitrateMode) {
     case C2Config::bitrate_mode_t::BITRATE_CONST_SKIP_ALLOWED:
@@ -1603,6 +1914,30 @@ const char* V4L2Device::v4L2BufferTypeToString(const enum v4l2_buf_type bufType)
     }
 }
 
+// static
+std::string V4L2Device::v4L2BufferTypeToATraceLabel(uint32_t debugStreamId,
+                                                    const enum v4l2_buf_type type,
+                                                    const char* label) {
+    const char* queueName;
+    switch (type) {
+    case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+        FALLTHROUGH;
+    case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
+        queueName = "CAPTURE";
+        break;
+    case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+        FALLTHROUGH;
+    case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
+        queueName = "OUTPUT";
+        break;
+    default:
+        queueName = "";
+        break;
+    }
+
+    return base::StringPrintf("#%u V4L2 %s %s", debugStreamId, queueName, label);
+}
+
 // static
 std::string V4L2Device::v4L2FormatToString(const struct v4l2_format& format) {
     std::ostringstream s;
@@ -1808,95 +2143,220 @@ std::vector<uint32_t> V4L2Device::enumerateSupportedPixelformats(v4l2_buf_type b
     return pixelFormats;
 }
 
-V4L2Device::SupportedDecodeProfiles V4L2Device::getSupportedDecodeProfiles(
-        const size_t numFormats, const uint32_t pixelFormats[]) {
-    SupportedDecodeProfiles supportedProfiles;
-
+// static
+std::vector<C2Config::level_t> V4L2Device::getSupportedDecodeLevels(VideoCodec videoCodecType) {
+    std::vector<C2Config::level_t> supportedLevels;
     Type type = Type::kDecoder;
-    const auto& devices = getDevicesForType(type);
-    for (const auto& device : devices) {
-        if (!openDevicePath(device.first, type)) {
-            ALOGV("Failed opening %s", device.first.c_str());
+
+    for (const auto& info : getDeviceInfosForType(type)) {
+        scoped_refptr<V4L2Device> device = V4L2Device::create();
+        if (!device->openDevicePath(info.first, type)) {
+            ALOGV("Failed opening %s", info.first.c_str());
             continue;
         }
 
-        const auto& profiles = enumerateSupportedDecodeProfiles(numFormats, pixelFormats);
-        supportedProfiles.insert(supportedProfiles.end(), profiles.begin(), profiles.end());
-        closeDevice();
+        const auto& levels = device->enumerateSupportedDecodeLevels(videoCodecType);
+        supportedLevels.insert(supportedLevels.end(), levels.begin(), levels.end());
+        device->closeDevice();
     }
 
-    return supportedProfiles;
+    return supportedLevels;
 }
 
-V4L2Device::SupportedEncodeProfiles V4L2Device::getSupportedEncodeProfiles() {
-    SupportedEncodeProfiles supportedProfiles;
-
-    Type type = Type::kEncoder;
-    const auto& devices = getDevicesForType(type);
-    for (const auto& device : devices) {
-        if (!openDevicePath(device.first, type)) {
-            ALOGV("Failed opening %s", device.first.c_str());
+// static
+SupportedProfiles V4L2Device::getSupportedProfiles(V4L2Device::Type type,
+                                                   const std::vector<uint32_t>& pixelFormats) {
+    SupportedProfiles supportedProfiles;
+
+    for (const auto& info : getDeviceInfosForType(type)) {
+        scoped_refptr<V4L2Device> device = V4L2Device::create();
+        if (!device->openDevicePath(info.first, type)) {
+            ALOGV("Failed opening %s", info.first.c_str());
             continue;
         }
 
-        const auto& profiles = enumerateSupportedEncodeProfiles();
+        const auto& profiles = device->enumerateSupportedProfiles(type, pixelFormats);
         supportedProfiles.insert(supportedProfiles.end(), profiles.begin(), profiles.end());
-        closeDevice();
+
+        device->closeDevice();
     }
 
     return supportedProfiles;
 }
 
-V4L2Device::SupportedDecodeProfiles V4L2Device::enumerateSupportedDecodeProfiles(
-        const size_t numFormats, const uint32_t pixelFormats[]) {
-    SupportedDecodeProfiles profiles;
+// static
+C2Config::profile_t V4L2Device::getDefaultProfile(VideoCodec codec) {
+    uint32_t queryId = 0;
 
-    const auto& supportedPixelformats =
-            enumerateSupportedPixelformats(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
+    switch (codec) {
+    case VideoCodec::H264:
+        queryId = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
+        break;
+    case VideoCodec::VP8:
+        queryId = V4L2_CID_MPEG_VIDEO_VP8_PROFILE;
+        break;
+    case VideoCodec::VP9:
+        queryId = V4L2_CID_MPEG_VIDEO_VP9_PROFILE;
+        break;
+    case VideoCodec::HEVC:
+        queryId = V4L2_CID_MPEG_VIDEO_HEVC_PROFILE;
+        break;
+    default:
+        return C2Config::PROFILE_UNUSED;
+    }
 
-    for (uint32_t pixelFormat : supportedPixelformats) {
-        if (std::find(pixelFormats, pixelFormats + numFormats, pixelFormat) ==
-            pixelFormats + numFormats)
+    for (const auto& info : getDeviceInfosForType(Type::kDecoder)) {
+        scoped_refptr<V4L2Device> device = V4L2Device::create();
+        if (!device->openDevicePath(info.first, Type::kDecoder)) {
+            ALOGV("Failed opening %s", info.first.c_str());
             continue;
+        }
 
-        SupportedDecodeProfile profile;
-        getSupportedResolution(pixelFormat, &profile.min_resolution, &profile.max_resolution);
+        // Call to query control which will return structure including
+        // index of default profile
+        v4l2_queryctrl queryCtrl = {};
+        queryCtrl.id = queryId;
+        if (device->ioctl(VIDIOC_QUERYCTRL, &queryCtrl) != 0) {
+            device->closeDevice();
+            continue;
+        }
 
-        const auto videoCodecProfiles = v4L2PixFmtToC2Profiles(pixelFormat, false);
+        v4l2_querymenu queryMenu = {};
+        queryMenu.id = queryCtrl.id;
+        queryMenu.index = queryCtrl.default_value;
+        if (device->ioctl(VIDIOC_QUERYMENU, &queryMenu) == 0) {
+            device->closeDevice();
+            return v4L2ProfileToC2Profile(codec, queryMenu.index);
+        }
 
-        for (const auto& videoCodecProfile : videoCodecProfiles) {
-            profile.profile = videoCodecProfile;
-            profiles.push_back(profile);
+        device->closeDevice();
+    }
+    return C2Config::PROFILE_UNUSED;
+}
 
-            ALOGV("Found decoder profile %s, resolutions: %s %s", profileToString(profile.profile),
-                  toString(profile.min_resolution).c_str(),
-                  toString(profile.max_resolution).c_str());
+// static
+C2Config::level_t V4L2Device::getDefaultLevel(VideoCodec codec) {
+    uint32_t queryId = 0;
+
+    switch (codec) {
+    case VideoCodec::H264:
+        queryId = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
+        break;
+#ifdef V4L2_CID_MPEG_VIDEO_VP9_LEVEL
+    case VideoCodec::VP9:
+        queryId = V4L2_CID_MPEG_VIDEO_VP9_LEVEL;
+        break;
+#endif
+    case VideoCodec::HEVC:
+        queryId = V4L2_CID_MPEG_VIDEO_HEVC_LEVEL;
+        break;
+    default:
+        return C2Config::LEVEL_UNUSED;
+    }
+
+    for (const auto& info : getDeviceInfosForType(Type::kDecoder)) {
+        scoped_refptr<V4L2Device> device = V4L2Device::create();
+        if (!device->openDevicePath(info.first, Type::kDecoder)) {
+            ALOGV("Failed opening %s", info.first.c_str());
+            continue;
+        }
+
+        v4l2_queryctrl queryCtrl = {};
+        queryCtrl.id = queryId;
+        if (device->ioctl(VIDIOC_QUERYCTRL, &queryCtrl) != 0) {  // gets index of default profile
+            device->closeDevice();
+            continue;
         }
+
+        v4l2_querymenu queryMenu = {};
+        queryMenu.id = queryCtrl.id;
+        queryMenu.index = queryCtrl.default_value;
+        if (device->ioctl(VIDIOC_QUERYMENU, &queryMenu) == 0) {
+            device->closeDevice();
+            return v4L2LevelToC2Level(codec, queryMenu.index);
+        }
+
+        device->closeDevice();
     }
 
-    return profiles;
+    return C2Config::LEVEL_UNUSED;
+}
+
+// static
+SupportedCapabilities V4L2Device::queryDecodingCapabilities(VideoCodec codec) {
+    SupportedCapabilities caps;
+    caps.codec = codec;
+    caps.supportedLevels = V4L2Device::getSupportedDecodeLevels(codec);
+    caps.defaultLevel = V4L2Device::getDefaultLevel(codec);
+    caps.supportedProfiles = V4L2Device::getSupportedProfiles(
+            V4L2Device::Type::kDecoder, {V4L2Device::videoCodecToPixFmt(codec)});
+    caps.defaultLevel = V4L2Device::getDefaultLevel(codec);
+
+    return caps;
+}
+
+// static
+SupportedCapabilities V4L2Device::queryEncodingCapabilities(VideoCodec codec) {
+    SupportedCapabilities caps;
+    caps.codec = codec;
+    caps.supportedProfiles = V4L2Device::getSupportedProfiles(
+            V4L2Device::Type::kEncoder, {V4L2Device::videoCodecToPixFmt(codec)});
+    return caps;
 }
 
-V4L2Device::SupportedEncodeProfiles V4L2Device::enumerateSupportedEncodeProfiles() {
-    SupportedEncodeProfiles profiles;
+std::vector<C2Config::level_t> V4L2Device::enumerateSupportedDecodeLevels(
+        VideoCodec videoCodecType) {
+    std::vector<C2Config::level_t> supportedLevels;
 
     const auto& supportedPixelformats =
-            enumerateSupportedPixelformats(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
+            enumerateSupportedPixelformats(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
 
-    for (const auto& pixelformat : supportedPixelformats) {
-        SupportedEncodeProfile profile;
-        profile.max_framerate_numerator = 30;
-        profile.max_framerate_denominator = 1;
-        ui::Size minResolution;
-        getSupportedResolution(pixelformat, &minResolution, &profile.max_resolution);
+    for (uint32_t pixelFormat : supportedPixelformats) {
+        if (isValidPixFmtForCodec(videoCodecType, pixelFormat)) {
+            std::vector<C2Config::level_t> levels = queryC2Levels(pixelFormat);
+            supportedLevels.insert(supportedLevels.end(), levels.begin(), levels.end());
+        }
+    }
 
-        const auto videoCodecProfiles = v4L2PixFmtToC2Profiles(pixelformat, true);
+    return supportedLevels;
+}
+
+SupportedProfiles V4L2Device::enumerateSupportedProfiles(
+        V4L2Device::Type type, const std::vector<uint32_t>& pixelFormats) {
+    SupportedProfiles profiles;
+
+    v4l2_buf_type bufType;
+    switch (type) {
+    case Type::kDecoder:
+        bufType = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+        break;
+    case Type::kEncoder:
+        bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+        break;
+    }
+
+    const auto& supportedPixelformats = enumerateSupportedPixelformats(bufType);
+
+    for (uint32_t pixelFormat : supportedPixelformats) {
+        if (std::find(pixelFormats.begin(), pixelFormats.end(), pixelFormat) == pixelFormats.end())
+            continue;
+
+        SupportedProfile profile;
+        if (type == Type::kEncoder) {
+            profile.max_framerate_numerator = 30;
+            profile.max_framerate_denominator = 1;
+        }
+
+        getSupportedResolution(pixelFormat, &profile.min_resolution, &profile.max_resolution);
+
+        const auto videoCodecProfiles = queryC2Profiles(pixelFormat);
 
         for (const auto& videoCodecProfile : videoCodecProfiles) {
             profile.profile = videoCodecProfile;
             profiles.push_back(profile);
 
-            ALOGV("Found encoder profile %s, max resolution: %s", profileToString(profile.profile),
+            ALOGV("Found profile %s, resolutions: %s %s", profileToString(profile.profile),
+                  toString(profile.min_resolution).c_str(),
                   toString(profile.max_resolution).c_str());
         }
     }
@@ -1904,12 +2364,14 @@ V4L2Device::SupportedEncodeProfiles V4L2Device::enumerateSupportedEncodeProfiles
     return profiles;
 }
 
-bool V4L2Device::startPolling(android::V4L2DevicePoller::EventCallback eventCallback,
+bool V4L2Device::startPolling(scoped_refptr<base::SequencedTaskRunner> taskRunner,
+                              android::V4L2DevicePoller::EventCallback eventCallback,
                               base::RepeatingClosure errorCallback) {
     DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
 
     if (!mDevicePoller) {
-        mDevicePoller = std::make_unique<android::V4L2DevicePoller>(this, "V4L2DeviceThreadPoller");
+        mDevicePoller = std::make_unique<android::V4L2DevicePoller>(this, "V4L2DeviceThreadPoller",
+                                                                    std::move(taskRunner));
     }
 
     bool ret = mDevicePoller->startPolling(std::move(eventCallback), std::move(errorCallback));
@@ -1994,70 +2456,59 @@ void V4L2Device::closeDevice() {
     mDeviceFd.reset();
 }
 
-void V4L2Device::enumerateDevicesForType(Type type) {
+// static
+const V4L2Device::DeviceInfos& V4L2Device::getDeviceInfosForType(V4L2Device::Type type) {
     // video input/output devices are registered as /dev/videoX in V4L2.
-    static const std::string kVideoDevicePattern = "/dev/video";
+    static constexpr const char* kVideoDevicePattern = "/dev/video";
+    static const DeviceInfos sNoDevices = {};
+    static std::mutex sDeviceInfosCacheLock;
+    static std::map<Type, DeviceInfos> sDeviceInfosCache;
+
+    std::lock_guard lock(sDeviceInfosCacheLock);
+    if (sDeviceInfosCache.find(type) != sDeviceInfosCache.end()) {
+        return sDeviceInfosCache[type];
+    }
 
-    std::string devicePattern;
     v4l2_buf_type bufType;
     switch (type) {
     case Type::kDecoder:
-        devicePattern = kVideoDevicePattern;
         bufType = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
         break;
     case Type::kEncoder:
-        devicePattern = kVideoDevicePattern;
         bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
         break;
     default:
         ALOGE("Only decoder and encoder types are supported!!");
-        return;
+        return sNoDevices;
     }
 
-    std::vector<std::string> candidatePaths;
-
-    // TODO(posciak): Remove this legacy unnumbered device once all platforms are updated to use
-    // numbered devices.
-    candidatePaths.push_back(devicePattern);
-
-    // We are sandboxed, so we can't query directory contents to check which devices are actually
-    // available. Try to open the first 16; if not present, we will just fail to open immediately.
-    for (int i = 0; i < 16; ++i) {
-        candidatePaths.push_back(base::StringPrintf("%s%d", devicePattern.c_str(), i));
-    }
+    DeviceInfos deviceInfos;
+    for (int i = 0; i < 10; ++i) {
+        std::string path = base::StringPrintf("%s%d", kVideoDevicePattern, i);
 
-    Devices devices;
-    for (const auto& path : candidatePaths) {
-        if (!openDevicePath(path, type)) {
+        scoped_refptr<V4L2Device> device = V4L2Device::create();
+        if (!device->openDevicePath(path, type)) {
             continue;
         }
 
-        const auto& supportedPixelformats = enumerateSupportedPixelformats(bufType);
+        const auto& supportedPixelformats = device->enumerateSupportedPixelformats(bufType);
         if (!supportedPixelformats.empty()) {
             ALOGV("Found device: %s", path.c_str());
-            devices.push_back(std::make_pair(path, supportedPixelformats));
+            deviceInfos.push_back(std::make_pair(path, supportedPixelformats));
         }
 
-        closeDevice();
+        device->closeDevice();
     }
 
-    ALOG_ASSERT(mDevicesByType.count(type) == 0u);
-    mDevicesByType[type] = devices;
-}
-
-const V4L2Device::Devices& V4L2Device::getDevicesForType(Type type) {
-    if (mDevicesByType.count(type) == 0) enumerateDevicesForType(type);
+    sDeviceInfosCache[type] = deviceInfos;
 
-    ALOG_ASSERT(mDevicesByType.count(type) != 0u);
-    return mDevicesByType[type];
+    return sDeviceInfosCache[type];
 }
 
 std::string V4L2Device::getDevicePathFor(Type type, uint32_t pixFmt) {
-    const Devices& devices = getDevicesForType(type);
-
-    for (const auto& device : devices) {
-        if (std::find(device.second.begin(), device.second.end(), pixFmt) != device.second.end())
-            return device.first;
+    for (const auto& info : getDeviceInfosForType(type)) {
+        if (std::find(info.second.begin(), info.second.end(), pixFmt) != info.second.end())
+            return info.first;
     }
 
     return std::string();
diff --git a/common/V4L2DevicePoller.cpp b/v4l2/V4L2DevicePoller.cpp
similarity index 64%
rename from common/V4L2DevicePoller.cpp
rename to v4l2/V4L2DevicePoller.cpp
index 5f2d0a5568d39a38b81b511133f0d379d22e8f6c..b4add7d4b8760d41ef51cfdd0d40e11341f53f7b 100644
--- a/common/V4L2DevicePoller.cpp
+++ b/v4l2/V4L2DevicePoller.cpp
@@ -3,7 +3,10 @@
 // found in the LICENSE file.
 // Note: ported from Chromium commit head: 22d34680c8ac
 
-#include <v4l2_codec2/common/V4L2DevicePoller.h>
+//#define LOG_NDEBUG 0
+#define LOG_TAG "V4L2DevicePoller"
+
+#include <v4l2_codec2/v4l2/V4L2DevicePoller.h>
 
 #include <string>
 
@@ -12,15 +15,15 @@
 #include <base/threading/thread_checker.h>
 #include <log/log.h>
 
-#include <v4l2_codec2/common/V4L2Device.h>
+#include <v4l2_codec2/v4l2/V4L2Device.h>
 
 namespace android {
 
-V4L2DevicePoller::V4L2DevicePoller(V4L2Device* const device, const std::string& threadName)
+V4L2DevicePoller::V4L2DevicePoller(V4L2Device* const device, const std::string& threadName,
+                                   scoped_refptr<base::SequencedTaskRunner> taskRunner)
       : mDevice(device),
         mPollThread(std::move(threadName)),
-        mTriggerPoll(base::WaitableEvent::ResetPolicy::AUTOMATIC,
-                     base::WaitableEvent::InitialState::NOT_SIGNALED),
+        mClientTaskTunner(std::move(taskRunner)),
         mStopPolling(false) {}
 
 V4L2DevicePoller::~V4L2DevicePoller() {
@@ -31,11 +34,12 @@ V4L2DevicePoller::~V4L2DevicePoller() {
 
 bool V4L2DevicePoller::startPolling(EventCallback eventCallback,
                                     base::RepeatingClosure errorCallback) {
+    ALOG_ASSERT(mClientTaskTunner->RunsTasksInCurrentSequence());
+
     if (isPolling()) return true;
 
     ALOGV("Starting polling");
 
-    mClientTaskTunner = base::SequencedTaskRunnerHandle::Get();
     mErrorCallback = errorCallback;
 
     if (!mPollThread.Start()) {
@@ -45,6 +49,7 @@ bool V4L2DevicePoller::startPolling(EventCallback eventCallback,
 
     mEventCallback = std::move(eventCallback);
 
+    mPollBuffers.store(false);
     mStopPolling.store(false);
     mPollThread.task_runner()->PostTask(
             FROM_HERE, base::BindOnce(&V4L2DevicePoller::devicePollTask, base::Unretained(this)));
@@ -65,8 +70,6 @@ bool V4L2DevicePoller::stopPolling() {
 
     mStopPolling.store(true);
 
-    mTriggerPoll.Signal();
-
     if (!mDevice->setDevicePollInterrupt()) {
         ALOGE("Failed to interrupt device poll.");
         return false;
@@ -97,17 +100,21 @@ void V4L2DevicePoller::schedulePoll() {
     // A call to DevicePollTask() will be posted when we actually start polling.
     if (!isPolling()) return;
 
-    ALOGV("Scheduling poll");
-
-    mTriggerPoll.Signal();
+    if (!mPollBuffers.exchange(true)) {
+        // Call mDevice->setDevicePollInterrupt only if pollBuffers is not
+        // already pending.
+        ALOGV("Scheduling poll");
+        if (!mDevice->setDevicePollInterrupt()) {
+            ALOGE("Failed to clear interrupting device poll.");
+        }
+    }
 }
 
 void V4L2DevicePoller::devicePollTask() {
-    ALOG_ASSERT(mClientTaskTunner->RunsTasksInCurrentSequence());
+    ALOG_ASSERT(mPollThread.task_runner()->RunsTasksInCurrentSequence());
 
     while (true) {
         ALOGV("Waiting for poll to be scheduled.");
-        mTriggerPoll.Wait();
 
         if (mStopPolling) {
             ALOGV("Poll stopped, exiting.");
@@ -115,15 +122,31 @@ void V4L2DevicePoller::devicePollTask() {
         }
 
         bool event_pending = false;
+        bool buffers_pending = false;
+
         ALOGV("Polling device.");
-        if (!mDevice->poll(true, &event_pending)) {
+        bool poll_buffers = mPollBuffers.exchange(false);
+        if (!mDevice->poll(true, poll_buffers, &event_pending, &buffers_pending)) {
             ALOGE("An error occurred while polling, calling error callback");
             mClientTaskTunner->PostTask(FROM_HERE, mErrorCallback);
             return;
         }
 
-        ALOGV("Poll returned, calling event callback.");
-        mClientTaskTunner->PostTask(FROM_HERE, base::Bind(mEventCallback, event_pending));
+        if (poll_buffers && !buffers_pending) {
+            // If buffer polling was requested but the buffers are not pending,
+            // then set to poll buffers again in the next iteration.
+            mPollBuffers.exchange(true);
+        }
+
+        if (!mDevice->clearDevicePollInterrupt()) {
+            ALOGE("Failed to clear interrupting device poll.");
+        }
+
+        if (buffers_pending || event_pending) {
+            ALOGV("Poll returned, calling event callback. event_pending=%d buffers_pending=%d",
+                  event_pending, buffers_pending);
+            mClientTaskTunner->PostTask(FROM_HERE, base::Bind(mEventCallback, event_pending));
+        }
     }
 }
 
diff --git a/v4l2/V4L2EncodeComponent.cpp b/v4l2/V4L2EncodeComponent.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..6d8f037bb84f3d8a057945d9a62a3f6ee82dd7da
--- /dev/null
+++ b/v4l2/V4L2EncodeComponent.cpp
@@ -0,0 +1,123 @@
+// Copyright 2023 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "V4L2EncodeComponent"
+
+#include <v4l2_codec2/v4l2/V4L2EncodeComponent.h>
+
+#include <base/bind_helpers.h>
+
+#include <cutils/properties.h>
+
+#include <v4l2_codec2/components/BitstreamBuffer.h>
+#include <v4l2_codec2/components/EncodeInterface.h>
+#include <v4l2_codec2/v4l2/V4L2Encoder.h>
+
+namespace android {
+
+namespace {
+
+// Check whether the specified |profile| is an H.264 profile.
+bool IsH264Profile(C2Config::profile_t profile) {
+    return (profile >= C2Config::PROFILE_AVC_BASELINE &&
+            profile <= C2Config::PROFILE_AVC_ENHANCED_MULTIVIEW_DEPTH_HIGH);
+}
+}  // namespace
+
+// static
+std::atomic<int32_t> V4L2EncodeComponent::sConcurrentInstances = 0;
+
+// static
+std::shared_ptr<C2Component> V4L2EncodeComponent::create(
+        C2String name, c2_node_id_t id, std::shared_ptr<EncodeInterface> intfImpl,
+        C2ComponentFactory::ComponentDeleter deleter) {
+    ALOGV("%s(%s)", __func__, name.c_str());
+
+    static const int32_t kMaxConcurrentInstances =
+            property_get_int32("ro.vendor.v4l2_codec2.encode_concurrent_instances", -1);
+
+    static std::mutex mutex;
+    std::lock_guard<std::mutex> lock(mutex);
+    if (kMaxConcurrentInstances >= 0 && sConcurrentInstances.load() >= kMaxConcurrentInstances) {
+        ALOGW("Cannot create additional encoder, maximum number of instances reached: %d",
+              kMaxConcurrentInstances);
+        return nullptr;
+    }
+
+    return std::shared_ptr<C2Component>(new V4L2EncodeComponent(name, id, std::move(intfImpl)),
+                                        deleter);
+}
+
+V4L2EncodeComponent::V4L2EncodeComponent(C2String name, c2_node_id_t id,
+                                         std::shared_ptr<EncodeInterface> interface)
+      : EncodeComponent(name, id, interface) {
+    ALOGV("%s():", __func__);
+    sConcurrentInstances.fetch_add(1, std::memory_order_relaxed);
+}
+
+V4L2EncodeComponent::~V4L2EncodeComponent() {
+    ALOGV("%s():", __func__);
+    sConcurrentInstances.fetch_sub(1, std::memory_order_relaxed);
+}
+
+bool V4L2EncodeComponent::initializeEncoder() {
+    ALOGV("%s()", __func__);
+    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
+    ALOG_ASSERT(!mInputFormatConverter);
+    ALOG_ASSERT(!mEncoder);
+
+    mLastFrameTime = std::nullopt;
+
+    // Get the requested profile and level.
+    C2Config::profile_t outputProfile = mInterface->getOutputProfile();
+
+    // CSD only needs to be extracted when using an H.264 profile.
+    mExtractCSD = IsH264Profile(outputProfile);
+
+    std::optional<uint8_t> h264Level;
+    if (IsH264Profile(outputProfile)) {
+        h264Level = c2LevelToV4L2Level(mInterface->getOutputLevel());
+    }
+
+    // Get the stride used by the C2 framework, as this might be different from the stride used by
+    // the V4L2 encoder.
+    std::optional<uint32_t> stride =
+            getVideoFrameStride(VideoEncoder::kInputPixelFormat, mInterface->getInputVisibleSize());
+    if (!stride) {
+        ALOGE("Failed to get video frame stride");
+        reportError(C2_CORRUPTED);
+        return false;
+    }
+
+    // Get the requested bitrate mode and bitrate. The C2 framework doesn't offer a parameter to
+    // configure the peak bitrate, so we use a multiple of the target bitrate.
+    mBitrateMode = mInterface->getBitrateMode();
+    if (property_get_bool("persist.vendor.v4l2_codec2.disable_vbr", false)) {
+        // NOTE: This is a workaround for b/235771157.
+        ALOGW("VBR is disabled on this device");
+        mBitrateMode = C2Config::BITRATE_CONST;
+    }
+
+    mBitrate = mInterface->getBitrate();
+
+    mEncoder = V4L2Encoder::create(
+            outputProfile, h264Level, mInterface->getInputVisibleSize(), *stride,
+            mInterface->getKeyFramePeriod(), mBitrateMode, mBitrate,
+            mBitrate * VideoEncoder::kPeakBitrateMultiplier,
+            ::base::BindRepeating(&V4L2EncodeComponent::fetchOutputBlock, mWeakThis),
+            ::base::BindRepeating(&V4L2EncodeComponent::onInputBufferDone, mWeakThis),
+            ::base::BindRepeating(&V4L2EncodeComponent::onOutputBufferDone, mWeakThis),
+            ::base::BindRepeating(&V4L2EncodeComponent::onDrainDone, mWeakThis),
+            ::base::BindRepeating(&V4L2EncodeComponent::reportError, mWeakThis, C2_CORRUPTED),
+            mEncoderTaskRunner);
+    if (!mEncoder) {
+        ALOGE("Failed to create V4L2Encoder (profile: %s)", profileToString(outputProfile));
+        return false;
+    }
+
+    return true;
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/components/V4L2Encoder.cpp b/v4l2/V4L2Encoder.cpp
similarity index 94%
rename from components/V4L2Encoder.cpp
rename to v4l2/V4L2Encoder.cpp
index cd20cb5afd4b17ef9f6c62a3bc7b9a4b5384d384..730827cf097c3ee26db68204f0368f043d17c17c 100644
--- a/components/V4L2Encoder.cpp
+++ b/v4l2/V4L2Encoder.cpp
@@ -5,7 +5,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "V4L2Encoder"
 
-#include <v4l2_codec2/components/V4L2Encoder.h>
+#include <v4l2_codec2/v4l2/V4L2Encoder.h>
 
 #include <stdint.h>
 #include <optional>
@@ -19,15 +19,13 @@
 
 #include <v4l2_codec2/common/EncodeHelpers.h>
 #include <v4l2_codec2/common/Fourcc.h>
-#include <v4l2_codec2/common/V4L2Device.h>
 #include <v4l2_codec2/components/BitstreamBuffer.h>
+#include <v4l2_codec2/v4l2/V4L2Device.h>
 
 namespace android {
 
 namespace {
 
-const VideoPixelFormat kInputPixelFormat = VideoPixelFormat::NV12;
-
 // The maximum size for output buffer, which is chosen empirically for a 1080p video.
 constexpr size_t kMaxBitstreamBufferSizeInBytes = 2 * 1024 * 1024;  // 2MB
 // The frame size for 1080p (FHD) video in pixels.
@@ -181,6 +179,11 @@ bool V4L2Encoder::setFramerate(uint32_t framerate) {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
+    if (framerate == 0) {
+        ALOGE("Requesting invalid framerate 0");
+        return false;
+    }
+
     struct v4l2_streamparm parms;
     memset(&parms, 0, sizeof(v4l2_streamparm));
     parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
@@ -218,7 +221,7 @@ bool V4L2Encoder::initialize(C2Config::profile_t outputProfile, std::optional<ui
 
     // Open the V4L2 device for encoding to the requested output format.
     // TODO(dstaessens): Avoid conversion to VideoCodecProfile and use C2Config::profile_t directly.
-    uint32_t outputPixelFormat = V4L2Device::C2ProfileToV4L2PixFmt(outputProfile, false);
+    uint32_t outputPixelFormat = V4L2Device::c2ProfileToV4L2PixFmt(outputProfile, false);
     if (!outputPixelFormat) {
         ALOGE("Invalid output profile %s", profileToString(outputProfile));
         return false;
@@ -570,7 +573,7 @@ bool V4L2Encoder::configureOutputFormat(C2Config::profile_t outputProfile) {
     ALOG_ASSERT(!mOutputQueue->isStreaming());
     ALOG_ASSERT(!isEmpty(mVisibleSize));
 
-    auto format = mOutputQueue->setFormat(V4L2Device::C2ProfileToV4L2PixFmt(outputProfile, false),
+    auto format = mOutputQueue->setFormat(V4L2Device::c2ProfileToV4L2PixFmt(outputProfile, false),
                                           mVisibleSize, GetMaxOutputBufferSize(mVisibleSize));
     if (!format) {
         ALOGE("Failed to set output format to %s", profileToString(outputProfile));
@@ -631,6 +634,18 @@ bool V4L2Encoder::configureH264(C2Config::profile_t outputProfile,
         ALOGV("Device doesn't support prepending SPS and PPS to IDR, injecting manually.");
     }
 
+    // Set the H.264 profile.
+    const int32_t profile = V4L2Device::c2ProfileToV4L2H264Profile(outputProfile);
+    if (profile < 0) {
+        ALOGE("Trying to set invalid H.264 profile");
+        return false;
+    }
+    if (!mDevice->setExtCtrls(V4L2_CTRL_CLASS_MPEG,
+                              {V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_H264_PROFILE, profile)})) {
+        ALOGE("Failed setting H.264 profile to %u", outputProfile);
+        return false;
+    }
+
     std::vector<V4L2ExtCtrl> h264Ctrls;
 
     // No B-frames, for lowest decoding latency.
@@ -638,14 +653,6 @@ bool V4L2Encoder::configureH264(C2Config::profile_t outputProfile,
     // Quantization parameter maximum value (for variable bitrate control).
     h264Ctrls.emplace_back(V4L2_CID_MPEG_VIDEO_H264_MAX_QP, 51);
 
-    // Set H.264 profile.
-    int32_t profile = V4L2Device::c2ProfileToV4L2H264Profile(outputProfile);
-    if (profile < 0) {
-        ALOGE("Trying to set invalid H.264 profile");
-        return false;
-    }
-    h264Ctrls.emplace_back(V4L2_CID_MPEG_VIDEO_H264_PROFILE, profile);
-
     // Set H.264 output level. Use Level 4.0 as fallback default.
     int32_t h264Level =
             static_cast<int32_t>(outputH264Level.value_or(V4L2_MPEG_VIDEO_H264_LEVEL_4_0));
@@ -666,7 +673,7 @@ bool V4L2Encoder::configureBitrateMode(C2Config::bitrate_mode_t bitrateMode) {
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
     v4l2_mpeg_video_bitrate_mode v4l2BitrateMode =
-            V4L2Device::C2BitrateModeToV4L2BitrateMode(bitrateMode);
+            V4L2Device::c2BitrateModeToV4L2BitrateMode(bitrateMode);
     if (!mDevice->setExtCtrls(V4L2_CTRL_CLASS_MPEG,
                               {V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_BITRATE_MODE, v4l2BitrateMode)})) {
         // TODO(b/190336806): Our stack doesn't support bitrate mode changes yet. We default to CBR
@@ -680,7 +687,8 @@ bool V4L2Encoder::startDevicePoll() {
     ALOGV("%s()", __func__);
     ALOG_ASSERT(mTaskRunner->RunsTasksInCurrentSequence());
 
-    if (!mDevice->startPolling(::base::BindRepeating(&V4L2Encoder::serviceDeviceTask, mWeakThis),
+    if (!mDevice->startPolling(mTaskRunner,
+                               ::base::BindRepeating(&V4L2Encoder::serviceDeviceTask, mWeakThis),
                                ::base::BindRepeating(&V4L2Encoder::onPollError, mWeakThis))) {
         ALOGE("Device poll thread failed to start");
         onError();
@@ -741,7 +749,7 @@ bool V4L2Encoder::enqueueInputBuffer(std::unique_ptr<InputFrame> frame) {
     ALOG_ASSERT(mInputLayout->mPlanes.size() == frame->planes().size());
 
     auto format = frame->pixelFormat();
-    auto planes = frame->planes();
+    auto& planes = frame->planes();
     auto index = frame->index();
     auto timestamp = frame->timestamp();
 
@@ -759,28 +767,43 @@ bool V4L2Encoder::enqueueInputBuffer(std::unique_ptr<InputFrame> frame) {
              .tv_usec = static_cast<time_t>(timestamp % ::base::Time::kMicrosecondsPerSecond)});
     size_t bufferId = buffer->bufferId();
 
-    for (size_t i = 0; i < planes.size(); ++i) {
-        // Single-buffer input format may have multiple color planes, so bytesUsed of the single
-        // buffer should be sum of each color planes' size.
-        size_t bytesUsed = 0;
-        if (planes.size() == 1) {
-            bytesUsed = allocationSize(format, mInputLayout->mCodedSize);
-        } else {
-            bytesUsed = ::base::checked_cast<size_t>(
+    std::vector<int> fds = frame->fds();
+    if (mInputLayout->mMultiPlanar) {
+        // If the input format is multi-planar, then we need to submit one memory plane per color
+        // plane of our input frames.
+        for (size_t i = 0; i < planes.size(); ++i) {
+            size_t bytesUsed = ::base::checked_cast<size_t>(
                     getArea(planeSize(format, i, mInputLayout->mCodedSize)).value());
+
+            // TODO(crbug.com/901264): The way to pass an offset within a DMA-buf is not defined
+            // in V4L2 specification, so we abuse data_offset for now. Fix it when we have the
+            // right interface, including any necessary validation and potential alignment.
+            buffer->setPlaneDataOffset(i, planes[i].mOffset);
+            bytesUsed += planes[i].mOffset;
+            // Workaround: filling length should not be needed. This is a bug of videobuf2 library.
+            buffer->setPlaneSize(i, mInputLayout->mPlanes[i].mSize + planes[i].mOffset);
+            buffer->setPlaneBytesUsed(i, bytesUsed);
         }
+    } else {
+        ALOG_ASSERT(!planes.empty());
+        // If the input format is single-planar, then we only submit one buffer which contains
+        // all the color planes.
+        size_t bytesUsed = allocationSize(format, mInputLayout->mCodedSize);
 
         // TODO(crbug.com/901264): The way to pass an offset within a DMA-buf is not defined
         // in V4L2 specification, so we abuse data_offset for now. Fix it when we have the
         // right interface, including any necessary validation and potential alignment.
-        buffer->setPlaneDataOffset(i, planes[i].mOffset);
-        bytesUsed += planes[i].mOffset;
+        buffer->setPlaneDataOffset(0, planes[0].mOffset);
+        bytesUsed += planes[0].mOffset;
         // Workaround: filling length should not be needed. This is a bug of videobuf2 library.
-        buffer->setPlaneSize(i, mInputLayout->mPlanes[i].mSize + planes[i].mOffset);
-        buffer->setPlaneBytesUsed(i, bytesUsed);
+        buffer->setPlaneSize(0, bytesUsed);
+        buffer->setPlaneBytesUsed(0, bytesUsed);
+        // We only have one memory plane so we shall submit only one FD. The others are duplicates
+        // of the first one anyway.
+        fds.resize(1);
     }
 
-    if (!std::move(*buffer).queueDMABuf(frame->fds())) {
+    if (!std::move(*buffer).queueDMABuf(fds)) {
         ALOGE("Failed to queue input buffer using QueueDMABuf");
         onError();
         return false;
diff --git a/common/include/v4l2_codec2/common/V4L2ComponentCommon.h b/v4l2/include/v4l2_codec2/v4l2/V4L2ComponentCommon.h
similarity index 61%
rename from common/include/v4l2_codec2/common/V4L2ComponentCommon.h
rename to v4l2/include/v4l2_codec2/v4l2/V4L2ComponentCommon.h
index a5fbdaf23d9f200f2444765fe8c1f1d568fbe336..7a9e36a16c9e2bb2f16e5cf6b1ee1615ad1c7a13 100644
--- a/common/include/v4l2_codec2/common/V4L2ComponentCommon.h
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2ComponentCommon.h
@@ -2,9 +2,11 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMMON_V4L2_COMPONENT_COMMON_H
-#define ANDROID_V4L2_CODEC2_COMMON_V4L2_COMPONENT_COMMON_H
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_COMMON_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_COMMON_H
 
+#include <v4l2_codec2/common/VideoTypes.h>
+#include <optional>
 #include <string>
 
 namespace android {
@@ -25,13 +27,20 @@ struct V4L2ComponentName {
     static const std::string kHEVCSecureDecoder;
 
     // Return true if |name| is a valid component name.
-    static bool isValid(const char* name);
+    static bool isValid(const std::string& name);
 
     // Return true if |name| is a encoder name.
     // Note that |name| should be a valid component name.
-    static bool isEncoder(const char* name);
+    static bool isEncoder(const std::string& name);
+
+    // Return true if |name| is a decoder name.
+    // Note that |name| should be a valid component name.
+    static bool isDecoder(const std::string& name);
+
+    // Returns VideoCodec for |name| component
+    static std::optional<VideoCodec> getCodec(const std::string& name);
 };
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMMON_V4L2_COMPONENT_COMMON_H
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_COMMON_H
diff --git a/components/include/v4l2_codec2/components/V4L2ComponentFactory.h b/v4l2/include/v4l2_codec2/v4l2/V4L2ComponentFactory.h
similarity index 71%
rename from components/include/v4l2_codec2/components/V4L2ComponentFactory.h
rename to v4l2/include/v4l2_codec2/v4l2/V4L2ComponentFactory.h
index fc6abea15c86c5cdd4998d1511ae9bba098be471..2c256289e84e864f92c74531d5dd584269ed7df3 100644
--- a/components/include/v4l2_codec2/components/V4L2ComponentFactory.h
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2ComponentFactory.h
@@ -2,17 +2,22 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_COMPONENT_FACTORY_H
-#define ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_COMPONENT_FACTORY_H
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_FACTORY_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_FACTORY_H
 
 #include <memory>
 #include <string>
 
 #include <C2ComponentFactory.h>
 #include <util/C2InterfaceHelper.h>
+#include <v4l2_codec2/common/Common.h>
 
 namespace android {
 
+struct SupportedCapabilities;
+class DecodeInterface;
+class EncodeInterface;
+
 class V4L2ComponentFactory : public C2ComponentFactory {
 public:
     static std::unique_ptr<V4L2ComponentFactory> create(
@@ -29,11 +34,15 @@ public:
                                 InterfaceDeleter deleter) override;
 
 private:
+    c2_status_t createEncodeInterface(std::shared_ptr<EncodeInterface>* intfImpl);
+    c2_status_t createDecodeInterface(std::shared_ptr<DecodeInterface>* intfImpl);
+
     const std::string mComponentName;
     const bool mIsEncoder;
     std::shared_ptr<C2ReflectorHelper> mReflector;
+    std::unique_ptr<SupportedCapabilities> mCapabilites;
 };
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_COMPONENT_FACTORY_H
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_FACTORY_H
diff --git a/v4l2/include/v4l2_codec2/v4l2/V4L2ComponentStore.h b/v4l2/include/v4l2_codec2/v4l2/V4L2ComponentStore.h
new file mode 100644
index 0000000000000000000000000000000000000000..4033b791e82eb2e3cdf62966635eba6d4d523473
--- /dev/null
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2ComponentStore.h
@@ -0,0 +1,18 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_STORE_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_STORE_H
+
+#include <C2Component.h>
+
+namespace android {
+
+struct V4L2ComponentStore {
+    static std::shared_ptr<C2ComponentStore> Create();
+};
+
+}  // namespace android
+
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_COMPONENT_STORE_H
diff --git a/v4l2/include/v4l2_codec2/v4l2/V4L2DecodeComponent.h b/v4l2/include/v4l2_codec2/v4l2/V4L2DecodeComponent.h
new file mode 100644
index 0000000000000000000000000000000000000000..dbb607f60e79b7294fab285c87dd6914d3efdb07
--- /dev/null
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2DecodeComponent.h
@@ -0,0 +1,28 @@
+
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_DECODE_COMPONENT_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_DECODE_COMPONENT_H
+
+#include <v4l2_codec2/components/DecodeComponent.h>
+
+namespace android {
+class V4L2DecodeComponent : public DecodeComponent {
+public:
+    static std::shared_ptr<C2Component> create(const std::string& name, c2_node_id_t id,
+                                               std::shared_ptr<DecodeInterface> intfImpl,
+                                               C2ComponentFactory::ComponentDeleter deleter);
+
+    V4L2DecodeComponent(uint32_t debugStreamId, const std::string& name, c2_node_id_t id,
+                        std::shared_ptr<DecodeInterface> intfImpl);
+
+    ~V4L2DecodeComponent() override;
+
+    void startTask(c2_status_t* status, ::base::WaitableEvent* done) override;
+
+private:
+    static std::atomic<int32_t> sConcurrentInstances;
+    static std::atomic<uint32_t> sNextDebugStreamId;
+};
+
+};  // namespace android
+
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_DECODE_COMPONENT_H
\ No newline at end of file
diff --git a/components/include/v4l2_codec2/components/V4L2Decoder.h b/v4l2/include/v4l2_codec2/v4l2/V4L2Decoder.h
similarity index 53%
rename from components/include/v4l2_codec2/components/V4L2Decoder.h
rename to v4l2/include/v4l2_codec2/v4l2/V4L2Decoder.h
index 2ecb3bdc8deb536c297f6043107bb77eac03bf9a..e569c1c5d37b5ded75f987f4bcef1957fb6ecaf6 100644
--- a/components/include/v4l2_codec2/components/V4L2Decoder.h
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2Decoder.h
@@ -2,33 +2,45 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODER_H
-#define ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODER_H
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_DECODER_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_DECODER_H
 
 #include <stdint.h>
 
+#include <cstdint>
 #include <memory>
 #include <optional>
+#include <utility>
 
 #include <base/callback.h>
 #include <base/memory/weak_ptr.h>
 
+#include <ui/GraphicBuffer.h>
 #include <ui/Rect.h>
 #include <ui/Size.h>
-#include <v4l2_codec2/common/V4L2Device.h>
+#include <v4l2_codec2/common/Fourcc.h>
 #include <v4l2_codec2/common/VideoTypes.h>
 #include <v4l2_codec2/components/VideoDecoder.h>
 #include <v4l2_codec2/components/VideoFrame.h>
 #include <v4l2_codec2/components/VideoFramePool.h>
+#include <v4l2_codec2/plugin_store/DmabufHelpers.h>
+#include <v4l2_codec2/v4l2/V4L2Device.h>
 
 namespace android {
 
+// Currently we only support flexible pixel 420 format YCBCR_420_888 in Android.
+// Here is the list of flexible 420 format.
+constexpr std::initializer_list<uint32_t> kSupportedOutputFourccs = {
+        Fourcc::YU12, Fourcc::YV12, Fourcc::YM12, Fourcc::YM21,
+        Fourcc::NV12, Fourcc::NV21, Fourcc::NM12, Fourcc::NM21,
+};
+
 class V4L2Decoder : public VideoDecoder {
 public:
     static std::unique_ptr<VideoDecoder> Create(
-            const VideoCodec& codec, const size_t inputBufferSize, const size_t minNumOutputBuffers,
-            GetPoolCB getPoolCB, OutputCB outputCb, ErrorCB errorCb,
-            scoped_refptr<::base::SequencedTaskRunner> taskRunner);
+            uint32_t debugStreamId, const VideoCodec& codec, const size_t inputBufferSize,
+            const size_t minNumOutputBuffers, GetPoolCB getPoolCB, OutputCB outputCb,
+            ErrorCB errorCb, scoped_refptr<::base::SequencedTaskRunner> taskRunner, bool isSecure);
     ~V4L2Decoder() override;
 
     void decode(std::unique_ptr<ConstBitstreamBuffer> buffer, DecodeCB decodeCb) override;
@@ -36,6 +48,8 @@ public:
     void flush() override;
 
 private:
+    static constexpr size_t kNumInputBuffers = 16;
+
     enum class State {
         Idle,  // Not received any decode buffer after initialized, flushed, or drained.
         Decoding,
@@ -54,11 +68,20 @@ private:
         DecodeCB decodeCb;
     };
 
-    V4L2Decoder(scoped_refptr<::base::SequencedTaskRunner> taskRunner);
+    V4L2Decoder(uint32_t debugStreamId, scoped_refptr<::base::SequencedTaskRunner> taskRunner);
     bool start(const VideoCodec& codec, const size_t inputBufferSize,
                const size_t minNumOutputBuffers, GetPoolCB getPoolCb, OutputCB outputCb,
-               ErrorCB errorCb);
+               ErrorCB errorCb, bool isSecure);
     bool setupInputFormat(const uint32_t inputPixelFormat, const size_t inputBufferSize);
+
+    // Sets the minimal resolution and allocates a minimal number of output
+    // buffers for drain done signaling.
+    bool setupInitialOutput();
+    // Finds the first output format and sets the output to its minimal resolution.
+    bool setupMinimalOutputFormat();
+    // Allocates at least |minOutputBuffersCount| output buffers using the currently set format.
+    bool startOutputQueue(size_t minOutputBuffersCount, enum v4l2_memory memory);
+
     void pumpDecodeRequest();
 
     void serviceDeviceTask(bool event);
@@ -77,14 +100,34 @@ private:
     void setState(State newState);
     void onError();
 
+    uint32_t mDebugStreamId;
+
     std::unique_ptr<VideoFramePool> mVideoFramePool;
 
     scoped_refptr<V4L2Device> mDevice;
     scoped_refptr<V4L2Queue> mInputQueue;
     scoped_refptr<V4L2Queue> mOutputQueue;
 
+    // Contains the initial EOS buffer, until DRC event is dequeued.
+    sp<GraphicBuffer> mInitialEosBuffer;
+
     std::queue<DecodeRequest> mDecodeRequests;
     std::map<int32_t, DecodeCB> mPendingDecodeCbs;
+    // Marks that we need to wait for DRC before drain can complete.
+    bool mPendingDRC = false;
+    // Holds information about secure playback, which won't allow decoder to
+    // access frames in order to provide extra meta information (like checking
+    // for pending DRC).
+    bool mIsSecure;
+    VideoCodec mCodec;
+
+    // Tracks the last DMA buffer ID which was used for a given V4L2 input
+    // buffer ID. Used to try to avoid re-importing buffers.
+    unique_id_t mLastDmaBufferId[kNumInputBuffers];
+
+    // The next input buffer ID to allocate. Note that since we don't un-allocate
+    // ids, all entries less than this in mLastDmaBufferId are valid.
+    size_t mNextInputBufferId = 0;
 
     size_t mMinNumOutputBuffers = 0;
     GetPoolCB mGetPoolCb;
@@ -95,8 +138,15 @@ private:
     ui::Size mCodedSize;
     Rect mVisibleRect;
 
+    // Currently enqueued frames at the decoder device, mapped by V4L2 buffer ID.
     std::map<size_t, std::unique_ptr<VideoFrame>> mFrameAtDevice;
 
+    // A queue of previously enqueued frames, that were returned during flush
+    // (STREAMOFF). Those frames will be reused as soon as `tryFetchVideoFrame`
+    // is called. This is a workaround for b/297228544 and helps with general
+    // responsiveness of the video playback due to b/270003218.
+    std::queue<std::pair<size_t, std::unique_ptr<VideoFrame>>> mReuseFrameQueue;
+
     // Block IDs can be arbitrarily large, but we only have a limited number of
     // buffers. This maintains an association between a block ID and a specific
     // V4L2 buffer index.
@@ -112,4 +162,4 @@ private:
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_DECODER_H
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_DECODER_H
diff --git a/common/include/v4l2_codec2/common/V4L2Device.h b/v4l2/include/v4l2_codec2/v4l2/V4L2Device.h
similarity index 84%
rename from common/include/v4l2_codec2/common/V4L2Device.h
rename to v4l2/include/v4l2_codec2/v4l2/V4L2Device.h
index 9824221b67e352677f839778e2eb5130608ecab1..29d4a2ee840c7c58867030ddbf6105443153d2c9 100644
--- a/common/include/v4l2_codec2/common/V4L2Device.h
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2Device.h
@@ -6,13 +6,14 @@
 // delegate/pass the device specific handling of any of the functionalities.
 // Note: ported from Chromium commit head: 2f13d62f0c0d, but some parts have been removed.
 
-#ifndef ANDROID_V4L2_CODEC2_COMMON_V4L2_DEVICE_H
-#define ANDROID_V4L2_CODEC2_COMMON_V4L2_DEVICE_H
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_DEVICE_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_DEVICE_H
 
 #include <linux/videodev2.h>
 #include <stddef.h>
 #include <stdint.h>
 
+#include <cstdint>
 #include <optional>
 #include <vector>
 
@@ -23,8 +24,28 @@
 
 #include <ui/Size.h>
 #include <v4l2_codec2/common/Common.h>
-#include <v4l2_codec2/common/V4L2DevicePoller.h>
 #include <v4l2_codec2/common/VideoTypes.h>
+#include <v4l2_codec2/v4l2/V4L2DevicePoller.h>
+
+// VP8 parsed frames
+#ifndef V4L2_PIX_FMT_VP8_FRAME
+#define V4L2_PIX_FMT_VP8_FRAME v4l2_fourcc('V', 'P', '8', 'F')
+#endif
+
+// VP9 parsed frames
+#ifndef V4L2_PIX_FMT_VP9_FRAME
+#define V4L2_PIX_FMT_VP9_FRAME v4l2_fourcc('V', 'P', '9', 'F')
+#endif
+
+// H264 parsed slices
+#ifndef V4L2_PIX_FMT_H264_SLICE
+#define V4L2_PIX_FMT_H264_SLICE v4l2_fourcc('S', '2', '6', '4')
+#endif
+
+// HEVC parsed slices
+#ifndef V4L2_PIX_FMT_HEVC_SLICE
+#define V4L2_PIX_FMT_HEVC_SLICE v4l2_fourcc('S', '2', '6', '5')
+#endif
 
 namespace android {
 
@@ -40,6 +61,8 @@ struct V4L2ExtCtrl {
     struct v4l2_ext_control ctrl;
 };
 
+bool isValidPixFmtForCodec(VideoCodec codec, uint32_t pixFmt);
+
 // A unique reference to a buffer for clients to prepare and submit.
 //
 // Clients can prepare a buffer for queuing using the methods of this class, and then either queue
@@ -287,6 +310,8 @@ private:
     // Called when clients request a buffer to be queued.
     bool queueBuffer(struct v4l2_buffer* v4l2Buffer);
 
+    void reportTraceMetrics();
+
     const enum v4l2_buf_type mType;
     enum v4l2_memory mMemory = V4L2_MEMORY_MMAP;
     bool mIsStreaming = false;
@@ -319,38 +344,19 @@ private:
 
 class V4L2Device : public ::base::RefCountedThreadSafe<V4L2Device> {
 public:
-    // Specification of an encoding profile supported by an encoder.
-    struct SupportedEncodeProfile {
-        C2Config::profile_t profile = C2Config::PROFILE_UNUSED;
-        ui::Size min_resolution;
-        ui::Size max_resolution;
-        uint32_t max_framerate_numerator = 0;
-        uint32_t max_framerate_denominator = 0;
-    };
-    using SupportedEncodeProfiles = std::vector<SupportedEncodeProfile>;
-
-    // Specification of a decoding profile supported by an decoder.
-    // |max_resolution| and |min_resolution| are inclusive.
-    struct SupportedDecodeProfile {
-        C2Config::profile_t profile = C2Config::PROFILE_UNUSED;
-        ui::Size max_resolution;
-        ui::Size min_resolution;
-        bool encrypted_only = false;
-    };
-    using SupportedDecodeProfiles = std::vector<SupportedDecodeProfile>;
-
     // Utility format conversion functions
     // If there is no corresponding single- or multi-planar format, returns 0.
-    static uint32_t C2ProfileToV4L2PixFmt(C2Config::profile_t profile, bool sliceBased);
+    static uint32_t c2ProfileToV4L2PixFmt(C2Config::profile_t profile, bool sliceBased);
+    static C2Config::level_t v4L2LevelToC2Level(VideoCodec codec, uint32_t level);
     static C2Config::profile_t v4L2ProfileToC2Profile(VideoCodec codec, uint32_t profile);
-    std::vector<C2Config::profile_t> v4L2PixFmtToC2Profiles(uint32_t pixFmt, bool isEncoder);
+    static uint32_t videoCodecToPixFmt(VideoCodec codec);
     // Calculates the largest plane's allocation size requested by a V4L2 device.
     static ui::Size allocatedSizeFromV4L2Format(const struct v4l2_format& format);
 
     // Convert required H264 profile and level to V4L2 enums.
     static int32_t c2ProfileToV4L2H264Profile(C2Config::profile_t profile);
     static int32_t h264LevelIdcToV4L2H264Level(uint8_t levelIdc);
-    static v4l2_mpeg_video_bitrate_mode C2BitrateModeToV4L2BitrateMode(
+    static v4l2_mpeg_video_bitrate_mode c2BitrateModeToV4L2BitrateMode(
             C2Config::bitrate_mode_t bitrateMode);
 
     // Converts v4l2_memory to a string.
@@ -359,6 +365,11 @@ public:
     // Returns the printable name of a v4l2_buf_type.
     static const char* v4L2BufferTypeToString(const enum v4l2_buf_type bufType);
 
+    // Converts v4l2_buf_type to a string, used for tracing.
+    static std::string v4L2BufferTypeToATraceLabel(uint32_t debugStreamId,
+                                                   const enum v4l2_buf_type type,
+                                                   const char* label);
+
     // Composes human readable string of v4l2_format.
     static std::string v4L2FormatToString(const struct v4l2_format& format);
 
@@ -374,9 +385,28 @@ public:
 
     enum class Type { kDecoder, kEncoder };
 
+    // Gets supported coding formats for |type| device and |pixelFormats|
+    static SupportedProfiles getSupportedProfiles(Type type,
+                                                  const std::vector<uint32_t>& pixelFormats);
+
+    // Gets supported levels for all decoder devices
+    static std::vector<C2Config::level_t> getSupportedDecodeLevels(VideoCodec videoCodecType);
+
+    // Get first current profile for any device
+    static C2Config::profile_t getDefaultProfile(VideoCodec codec);
+
+    // Gets the first current level for any device
+    static C2Config::level_t getDefaultLevel(VideoCodec codec);
+
+    // Gets all capabilities of the decoder devices.
+    static SupportedCapabilities queryDecodingCapabilities(VideoCodec codec);
+
+    // Gets all capabilities of the encoder devices.
+    static SupportedCapabilities queryEncodingCapabilities(VideoCodec codec);
+
     // Create and initialize an appropriate V4L2Device instance for the current platform, or return
     // nullptr if not available.
-    static scoped_refptr<V4L2Device> create();
+    static scoped_refptr<V4L2Device> create(uint32_t debugStreamId = -1);
 
     // Open a V4L2 device of |type| for use with |v4l2PixFmt|. Return true on success. The device
     // will be closed in the destructor.
@@ -391,11 +421,12 @@ public:
 
     // This method sleeps until either:
     // - SetDevicePollInterrupt() is called (on another thread),
-    // - |pollDevice| is true, and there is new data to be read from the device,
-    //   or an event from the device has arrived; in the latter case
-    //   |*eventPending| will be set to true.
+    // - |pollDevice| is true, and a new event from the device has arrived;
+    //   in this case |*eventPending| will be set to true.
+    // - |pollBuffers| is true and |pollDevice| is true and there is new data to
+    //   be read from the device; in this case |*buffersPending| will be set to true.
     // Returns false on error, true otherwise. This method should be called from a separate thread.
-    bool poll(bool pollDevice, bool* eventPending);
+    bool poll(bool pollDevice, bool pollBuffers, bool* eventPending, bool* buffersPending);
 
     // These methods are used to interrupt the thread sleeping on poll() and force it to return
     // regardless of device state, which is usually when the client is no longer interested in what
@@ -426,21 +457,24 @@ public:
     void getSupportedResolution(uint32_t pixelFormat, ui::Size* minResolution,
                                 ui::Size* maxResolution);
 
-    std::vector<uint32_t> enumerateSupportedPixelformats(v4l2_buf_type bufType);
+    // Queries supported levels for |pixFmt| pixel format
+    std::vector<C2Config::level_t> queryC2Levels(uint32_t pixFmt);
+
+    // Queries supported profiles for |pixFmt| pixel format
+    std::vector<C2Config::profile_t> queryC2Profiles(uint32_t pixFmt);
 
-    // Return supported profiles for decoder, including only profiles for given fourcc
-    // |pixelFormats|.
-    SupportedDecodeProfiles getSupportedDecodeProfiles(const size_t numFormats,
-                                                       const uint32_t pixelFormats[]);
+    // Queries supported pixel format for a |bufType| queue type
+    std::vector<uint32_t> enumerateSupportedPixelformats(v4l2_buf_type bufType);
 
-    // Return supported profiles for encoder.
-    SupportedEncodeProfiles getSupportedEncodeProfiles();
+    // Queries supported levels for |videoCodecType|
+    std::vector<C2Config::level_t> enumerateSupportedDecodeLevels(VideoCodec videoCodecType);
 
     // Start polling on this V4L2Device. |eventCallback| will be posted to the caller's sequence if
     // a buffer is ready to be dequeued and/or a V4L2 event has been posted. |errorCallback| will
     // be posted to the client's
     // sequence if a polling error has occurred.
-    bool startPolling(android::V4L2DevicePoller::EventCallback eventCallback,
+    bool startPolling(scoped_refptr<::base::SequencedTaskRunner> taskRunner,
+                      android::V4L2DevicePoller::EventCallback eventCallback,
                       ::base::RepeatingClosure errorCallback);
     // Stop polling this V4L2Device if polling was active. No new events will be posted after this
     // method has returned.
@@ -460,21 +494,25 @@ public:
     // Check whether the V4L2 device has the specified |capabilities|.
     bool hasCapabilities(uint32_t capabilities);
 
+    // Returns identifier used for debugging purposes.
+    uint32_t getDebugStreamId() { return mDebugStreamId; }
+
 private:
     // Vector of video device node paths and corresponding pixelformats supported by each device node.
-    using Devices = std::vector<std::pair<std::string, std::vector<uint32_t>>>;
+    using DeviceInfos = std::vector<std::pair<std::string, std::vector<uint32_t>>>;
+
+    // Enumerate all V4L2 devices on the system for |type| and return them
+    static const DeviceInfos& getDeviceInfosForType(V4L2Device::Type type);
 
     friend class ::base::RefCountedThreadSafe<V4L2Device>;
-    V4L2Device();
+    V4L2Device(uint32_t debugStreamId);
     ~V4L2Device();
 
     V4L2Device(const V4L2Device&) = delete;
     V4L2Device& operator=(const V4L2Device&) = delete;
 
-    SupportedDecodeProfiles enumerateSupportedDecodeProfiles(const size_t numFormats,
-                                                             const uint32_t pixelFormats[]);
-
-    SupportedEncodeProfiles enumerateSupportedEncodeProfiles();
+    SupportedProfiles enumerateSupportedProfiles(V4L2Device::Type type,
+                                                 const std::vector<uint32_t>& pixelFormats);
 
     // Open device node for |path| as a device of |type|.
     bool openDevicePath(const std::string& path, Type type);
@@ -482,14 +520,6 @@ private:
     // Close the currently open device.
     void closeDevice();
 
-    // Enumerate all V4L2 devices on the system for |type| and store the results under
-    // mDevicesByType[type].
-    void enumerateDevicesForType(V4L2Device::Type type);
-
-    // Return device information for all devices of |type| available in the system. Enumerates and
-    // queries devices on first run and caches the results for subsequent calls.
-    const Devices& getDevicesForType(V4L2Device::Type type);
-
     // Return device node path for device of |type| supporting |pixFmt|, or an empty string if the
     // given combination is not supported by the system.
     std::string getDevicePathFor(V4L2Device::Type type, uint32_t pixFmt);
@@ -497,8 +527,8 @@ private:
     // Callback that is called upon a queue's destruction, to cleanup its pointer in mQueues.
     void onQueueDestroyed(v4l2_buf_type buf_type);
 
-    // Stores information for all devices available on the system for each device Type.
-    std::map<V4L2Device::Type, Devices> mDevicesByType;
+    // Identifier used for debugging purposes.
+    uint32_t mDebugStreamId;
 
     // The actual device fd.
     ::base::ScopedFD mDeviceFd;
@@ -518,4 +548,4 @@ private:
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMMON_V4L2_DEVICE_H
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_DEVICE_H
diff --git a/common/include/v4l2_codec2/common/V4L2DevicePoller.h b/v4l2/include/v4l2_codec2/v4l2/V4L2DevicePoller.h
similarity index 86%
rename from common/include/v4l2_codec2/common/V4L2DevicePoller.h
rename to v4l2/include/v4l2_codec2/v4l2/V4L2DevicePoller.h
index 53b4b3f59a0870364d6eb4c0e6ec8ebe1a56607b..4842088cc231f36026f7f68607f391d24e9afe3f 100644
--- a/common/include/v4l2_codec2/common/V4L2DevicePoller.h
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2DevicePoller.h
@@ -3,8 +3,8 @@
 // found in the LICENSE file.
 // Note: ported from Chromium commit head: f65c38dcdac2
 
-#ifndef ANDROID_V4L2_CODEC2_COMMON_V4L2_DEVICE_POLLER_H
-#define ANDROID_V4L2_CODEC2_COMMON_V4L2_DEVICE_POLLER_H
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_DEVICE_POLLER_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_DEVICE_POLLER_H
 
 #include <atomic>
 
@@ -36,7 +36,8 @@ public:
 
     // Create a poller for |device|, using a thread named |threadName|. Notification won't start
     // until |startPolling()| is called.
-    V4L2DevicePoller(V4L2Device* const device, const std::string& threadName);
+    V4L2DevicePoller(V4L2Device* const device, const std::string& threadName,
+                     scoped_refptr<::base::SequencedTaskRunner> taskRunner);
     ~V4L2DevicePoller();
 
     // Starts polling. |mEventCallback| will be posted on the caller's sequence every time an event
@@ -74,15 +75,12 @@ private:
     // Client sequence's task runner, where closures are posted.
     scoped_refptr<::base::SequencedTaskRunner> mClientTaskTunner;
 
-    // Since poll() returns immediately if no buffers have been queued, we cannot rely on it to
-    // pause the polling thread until an event occurs. Instead,
-    // the polling thread will wait on this WaitableEvent (signaled by |schedulePoll| before calling
-    // poll(), so we only call it when we are actually waiting for an event.
-    ::base::WaitableEvent mTriggerPoll;
     // Set to true when we wish to stop polling, instructing the poller thread to break its loop.
     std::atomic_bool mStopPolling;
+    // Set to true when we want poll() to wait for QBUF/DQBUF readiness
+    std::atomic_bool mPollBuffers;
 };
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMMON_V4L2_DEVICE_POLLER_H
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_DEVICE_POLLER_H
diff --git a/v4l2/include/v4l2_codec2/v4l2/V4L2EncodeComponent.h b/v4l2/include/v4l2_codec2/v4l2/V4L2EncodeComponent.h
new file mode 100644
index 0000000000000000000000000000000000000000..79d99d0edcfea8aa9fbced25680935f706c9d6b7
--- /dev/null
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2EncodeComponent.h
@@ -0,0 +1,33 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_ENCODE_COMPONENT_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_ENCODE_COMPONENT_H
+
+#include <v4l2_codec2/components/EncodeComponent.h>
+
+namespace android {
+
+class V4L2EncodeComponent : public EncodeComponent {
+public:
+    // Create a new instance of the V4L2EncodeComponent.
+    static std::shared_ptr<C2Component> create(C2String name, c2_node_id_t id,
+                                               std::shared_ptr<EncodeInterface> intfImpl,
+                                               C2ComponentFactory::ComponentDeleter deleter);
+
+    virtual ~V4L2EncodeComponent() override;
+
+protected:
+    bool initializeEncoder() override;
+
+private:
+    // The number of concurrent encoder instances currently created.
+    static std::atomic<int32_t> sConcurrentInstances;
+
+    V4L2EncodeComponent(C2String name, c2_node_id_t id, std::shared_ptr<EncodeInterface> interface);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_ENCODE_COMPONENT_H
\ No newline at end of file
diff --git a/components/include/v4l2_codec2/components/V4L2Encoder.h b/v4l2/include/v4l2_codec2/v4l2/V4L2Encoder.h
similarity index 96%
rename from components/include/v4l2_codec2/components/V4L2Encoder.h
rename to v4l2/include/v4l2_codec2/v4l2/V4L2Encoder.h
index d7b55c0a3de8a3f70ecc456d6bf97c40e90b5ad8..d1eb65659d84b54a241bc4f33f1d2c437986c124 100644
--- a/components/include/v4l2_codec2/components/V4L2Encoder.h
+++ b/v4l2/include/v4l2_codec2/v4l2/V4L2Encoder.h
@@ -2,8 +2,8 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODER_H
-#define ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODER_H
+#ifndef ANDROID_V4L2_CODEC2_V4L2_V4L2_ENCODER_H
+#define ANDROID_V4L2_CODEC2_V4L2_V4L2_ENCODER_H
 
 #include <stdint.h>
 #include <memory>
@@ -27,10 +27,6 @@ class V4L2Queue;
 
 class V4L2Encoder : public VideoEncoder {
 public:
-    // Number of buffers on V4L2 device queues.
-    static constexpr size_t kInputBufferCount = 2;
-    static constexpr size_t kOutputBufferCount = 2;
-
     static std::unique_ptr<VideoEncoder> create(
             C2Config::profile_t profile, std::optional<uint8_t> level, const ui::Size& visibleSize,
             uint32_t stride, uint32_t keyFramePeriod, C2Config::bitrate_mode_t bitrateMode,
@@ -198,4 +194,4 @@ private:
 
 }  // namespace android
 
-#endif  // ANDROID_V4L2_CODEC2_COMPONENTS_V4L2_ENCODER_H
+#endif  // ANDROID_V4L2_CODEC2_V4L2_V4L2_ENCODER_H