Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e0768654 authored by Emil Jahshan's avatar Emil Jahshan Committed by android-build-merger
Browse files

Merge "Y16 format enablement for external provider" am: 44968dc6

am: 0e8d2cb6

Change-Id: Iff5ae7adfec1b047ca400cb0e018987d1b31f7ad
parents 094a06c2 0e8d2cb6
Loading
Loading
Loading
Loading
+168 −69
Original line number Diff line number Diff line
@@ -38,9 +38,8 @@ namespace {
// Other formats to consider in the future:
// * V4L2_PIX_FMT_YVU420 (== YV12)
// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
const std::array<uint32_t, /*size*/1> kSupportedFourCCs {{
    V4L2_PIX_FMT_MJPEG
}}; // double braces required in C++11
const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
    {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}};  // double braces required in C++11

constexpr int MAX_RETRY = 5; // Allow retry v4l2 open failures a few times.
constexpr int OPEN_RETRY_SLEEP_US = 100000; // 100ms * MAX_RETRY = 0.5 seconds
@@ -224,6 +223,13 @@ status_t ExternalCameraDevice::initCameraCharacteristics() {
            mCameraCharacteristics.clear();
            return ret;
        }

        ret = initAvailableCapabilities(&mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init available capabilities key failed: errorno %d", __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }
    }
    return OK;
}
@@ -237,6 +243,39 @@ do { \
  }                                                \
} while (0)

status_t ExternalCameraDevice::initAvailableCapabilities(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    // Populate ANDROID_REQUEST_AVAILABLE_CAPABILITIES from the V4L2 pixel
    // formats the device enumerated: MJPEG implies a color
    // (BACKWARD_COMPATIBLE) camera, Z16 implies a depth-output camera.
    // Returns UNKNOWN_ERROR if no formats were enumerated or the metadata
    // update fails (via UPDATE), OK otherwise.
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Supported formats list is empty", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    bool hasDepth = false;
    bool hasColor = false;
    for (const auto& fmt : mSupportedFormats) {
        switch (fmt.fourcc) {
            case V4L2_PIX_FMT_Z16:
                hasDepth = true;
                break;
            case V4L2_PIX_FMT_MJPEG:
                hasColor = true;
                break;
            default:
                // Log the actual 4CC so unexpected formats are diagnosable;
                // matches the warning style used in initOutputCharsKeys.
                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
                      fmt.fourcc & 0xFF, (fmt.fourcc >> 8) & 0xFF,
                      (fmt.fourcc >> 16) & 0xFF, (fmt.fourcc >> 24) & 0xFF);
        }
    }

    std::vector<uint8_t> availableCapabilities;
    if (hasDepth) {
        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
    }
    if (hasColor) {
        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    }
    if (!availableCapabilities.empty()) {
        UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(),
               availableCapabilities.size());
    }

    return OK;
}

status_t ExternalCameraDevice::initDefaultCharsKeys(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
@@ -323,12 +362,6 @@ status_t ExternalCameraDevice::initDefaultCharsKeys(
           &noiseReductionMode, 1);
    UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);

    // android.request
    const uint8_t availableCapabilities[] = {
        ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE};
    UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities,
           ARRAY_SIZE(availableCapabilities));

    const int32_t partialResultCount = 1;
    UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1);

@@ -576,9 +609,11 @@ status_t ExternalCameraDevice::initCameraControlsCharsKeys(int,
    return OK;
}

status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    initSupportedFormatsLocked(fd);
template <size_t SIZE>
status_t ExternalCameraDevice::initOutputCharskeysByFormat(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
        uint32_t fourcc, const std::array<int, SIZE>& halFormats,
        int streamConfigTag, int streamConfiguration, int minFrameDuration, int stallDuration) {
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
@@ -587,22 +622,17 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
    std::vector<int32_t> streamConfigurations;
    std::vector<int64_t> minFrameDurations;
    std::vector<int64_t> stallDurations;
    int32_t maxFps = std::numeric_limits<int32_t>::min();
    int32_t minFps = std::numeric_limits<int32_t>::max();
    std::set<int32_t> framerates;

    std::array<int, /*size*/3> halFormats{{
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_YCbCr_420_888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};

    for (const auto& supportedFormat : mSupportedFormats) {
        if (supportedFormat.fourcc != fourcc) {
            // Skip 4CCs not meant for the halFormats
            continue;
        }
        for (const auto& format : halFormats) {
            streamConfigurations.push_back(format);
            streamConfigurations.push_back(supportedFormat.width);
            streamConfigurations.push_back(supportedFormat.height);
            streamConfigurations.push_back(
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            streamConfigurations.push_back(streamConfigTag);
        }

        int64_t minFrameDuration = std::numeric_limits<int64_t>::max();
@@ -614,14 +644,6 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
            if (frameDuration < minFrameDuration) {
                minFrameDuration = frameDuration;
            }
            int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
            if (minFps > frameRateInt) {
                minFps = frameRateInt;
            }
            if (maxFps < frameRateInt) {
                maxFps = frameRateInt;
            }
            framerates.insert(frameRateInt);
        }

        for (const auto& format : halFormats) {
@@ -645,6 +667,30 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
        }
    }

    UPDATE(streamConfiguration, streamConfigurations.data(), streamConfigurations.size());

    UPDATE(minFrameDuration, minFrameDurations.data(), minFrameDurations.size());

    UPDATE(stallDuration, stallDurations.data(), stallDurations.size());

    return true;
}

bool ExternalCameraDevice::calculateMinFps(
    ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    std::set<int32_t> framerates;
    int32_t minFps = std::numeric_limits<int32_t>::max();

    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& fr : supportedFormat.frameRates) {
            int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
            if (minFps > frameRateInt) {
                minFps = frameRateInt;
            }
            framerates.insert(frameRateInt);
        }
    }

    std::vector<int32_t> fpsRanges;
    // FPS ranges
    for (const auto& framerate : framerates) {
@@ -658,16 +704,59 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(),
           fpsRanges.size());

    UPDATE(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
           streamConfigurations.data(), streamConfigurations.size());
    UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);

    UPDATE(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
           minFrameDurations.data(), minFrameDurations.size());
    return true;
}

    UPDATE(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, stallDurations.data(),
           stallDurations.size());
status_t ExternalCameraDevice::initOutputCharsKeys(
    int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    initSupportedFormatsLocked(fd);
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);
    bool hasDepth = false;
    bool hasColor = false;

    // For V4L2_PIX_FMT_Z16
    std::array<int, /*size*/ 1> halDepthFormats{{HAL_PIXEL_FORMAT_Y16}};
    // For V4L2_PIX_FMT_MJPEG
    std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
                                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};

    for (const auto& supportedFormat : mSupportedFormats) {
        switch (supportedFormat.fourcc) {
            case V4L2_PIX_FMT_Z16:
                hasDepth = true;
                break;
            case V4L2_PIX_FMT_MJPEG:
                hasColor = true;
                break;
            default:
                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
                      supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF,
                      (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF);
        }
    }

    if (hasDepth) {
        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_Z16, halDepthFormats,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
    }
    if (hasColor) {
        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
    }

    calculateMinFps(metadata);

    SupportedV4L2Format maximumFormat {.width = 0, .height = 0};
    for (const auto& supportedFormat : mSupportedFormats) {
@@ -790,11 +879,12 @@ void ExternalCameraDevice::trimSupportedFormats(
    sortedFmts = out;
}

std::vector<SupportedV4L2Format>
ExternalCameraDevice::getCandidateSupportedFormatsLocked(
std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
    int fd, CroppingType cropType,
    const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
        const Size& minStreamSize) {
    const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
    const Size& minStreamSize,
    bool depthEnabled) {
    std::vector<SupportedV4L2Format> outFmts;
    struct v4l2_fmtdesc fmtdesc {
        .index = 0,
@@ -840,6 +930,25 @@ ExternalCameraDevice::getCandidateSupportedFormatsLocked(
                            .fourcc = fmtdesc.pixelformat
                        };

                        if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
                            updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
                        } else {
                            updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
                        }
                    }
                }
            }
        }
        fmtdesc.index++;
    }
    trimSupportedFormats(cropType, &outFmts);
    return outFmts;
}

void ExternalCameraDevice::updateFpsBounds(
    int fd, CroppingType cropType,
    const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits, SupportedV4L2Format format,
    std::vector<SupportedV4L2Format>& outFmts) {
    double fpsUpperBound = -1.0;
    for (const auto& limit : fpsLimits) {
        if (cropType == VERTICAL) {
@@ -853,10 +962,9 @@ ExternalCameraDevice::getCandidateSupportedFormatsLocked(
                break;
            }
        }

    }
    if (fpsUpperBound < 0.f) {
                            continue;
        return;
    }

    getFrameRateList(fd, fpsUpperBound, &format);
@@ -864,21 +972,12 @@ ExternalCameraDevice::getCandidateSupportedFormatsLocked(
        outFmts.push_back(format);
    }
}
                }
            }
        }
        fmtdesc.index++;
    }
    trimSupportedFormats(cropType, &outFmts);
    return outFmts;
}

void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {

    std::vector<SupportedV4L2Format> horizontalFmts =
            getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits, mCfg.minStreamSize);
    std::vector<SupportedV4L2Format> verticalFmts =
            getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits, mCfg.minStreamSize);
    std::vector<SupportedV4L2Format> horizontalFmts = getCandidateSupportedFormatsLocked(
        fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);
    std::vector<SupportedV4L2Format> verticalFmts = getCandidateSupportedFormatsLocked(
        fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);

    size_t horiSize = horizontalFmts.size();
    size_t vertSize = verticalFmts.size();
+41 −28
Original line number Diff line number Diff line
@@ -1724,7 +1724,7 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
        return false;
    };

    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) {
    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
        return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
                req->frameIn->mFourcc & 0xFF,
                (req->frameIn->mFourcc >> 8) & 0xFF,
@@ -1743,17 +1743,13 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
    }

    // TODO: in some special case maybe we can decode jpg directly to gralloc output?
    if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
        ATRACE_BEGIN("MJPGtoI420");
        int res = libyuv::MJPGToI420(
            inData, inDataSize,
            static_cast<uint8_t*>(mYu12FrameLayout.y),
            mYu12FrameLayout.yStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cb),
            mYu12FrameLayout.cStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cr),
            mYu12FrameLayout.cStride,
            mYu12Frame->mWidth, mYu12Frame->mHeight,
            mYu12Frame->mWidth, mYu12Frame->mHeight);
            inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y), mYu12FrameLayout.yStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cb), mYu12FrameLayout.cStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cr), mYu12FrameLayout.cStride,
            mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth, mYu12Frame->mHeight);
        ATRACE_END();

        if (res != 0) {
@@ -1767,6 +1763,7 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
            signalRequestDone();
            return true;
        }
    }

    ALOGV("%s processing new request", __FUNCTION__);
    const int kSyncWaitTimeoutMs = 500;
@@ -1796,6 +1793,16 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
                          __FUNCTION__, ret);
                }
            } break;
            case PixelFormat::Y16: {
                void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize);

                std::memcpy(outLayout, inData, inDataSize);

                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
                if (relFence >= 0) {
                    halBuf.acquireFence = relFence;
                }
            } break;
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: {
                IMapper::Rect outRect {0, 0,
@@ -2063,11 +2070,6 @@ bool ExternalCameraDeviceSession::isSupported(const Stream& stream) {
        return false;
    }

    if (ds & Dataspace::DEPTH) {
        ALOGI("%s: does not support depth output", __FUNCTION__);
        return false;
    }

    switch (fmt) {
        case PixelFormat::BLOB:
            if (ds != static_cast<int32_t>(Dataspace::V0_JFIF)) {
@@ -2081,6 +2083,16 @@ bool ExternalCameraDeviceSession::isSupported(const Stream& stream) {
            // TODO: check what dataspace we can support here.
            // intentional no-ops.
            break;
        case PixelFormat::Y16:
            if (!mCfg.depthEnabled) {
                ALOGI("%s: Depth is not Enabled", __FUNCTION__);
                return false;
            }
            if (!(ds & Dataspace::DEPTH)) {
                ALOGI("%s: Y16 supports only dataSpace DEPTH", __FUNCTION__);
                return false;
            }
            break;
        default:
            ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
            return false;
@@ -2609,6 +2621,7 @@ Status ExternalCameraDeviceSession::configureStreams(
            case PixelFormat::BLOB:
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: // Used by SurfaceTexture
            case PixelFormat::Y16:
                // No override
                out->streams[i].v3_2.overrideFormat = config.streams[i].format;
                break;
+53 −21
Original line number Diff line number Diff line
@@ -21,7 +21,6 @@
#include <sys/mman.h>
#include <linux/videodev2.h>
#include "ExternalCameraUtils.h"
#include "tinyxml2.h" // XML parsing

namespace android {
namespace hardware {
@@ -243,28 +242,28 @@ ExternalCameraConfig ExternalCameraConfig::loadFromCfg(const char* cfgPath) {
    if (fpsList == nullptr) {
        ALOGI("%s: no fps list specified", __FUNCTION__);
    } else {
        std::vector<FpsLimitation> limits;
        XMLElement *row = fpsList->FirstChildElement("Limit");
        while (row != nullptr) {
            FpsLimitation prevLimit {{0, 0}, 1000.0};
            FpsLimitation limit;
            limit.size = {
                row->UnsignedAttribute("width", /*Default*/0),
                row->UnsignedAttribute("height", /*Default*/0)};
            limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/1000.0);
            if (limit.size.width <= prevLimit.size.width ||
                    limit.size.height <= prevLimit.size.height ||
                    limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
                ALOGE("%s: FPS limit list must have increasing size and decreasing fps!"
                        " Prev %dx%d@%f, Current %dx%d@%f", __FUNCTION__,
                        prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound,
                        limit.size.width, limit.size.height, limit.fpsUpperBound);
        if (!updateFpsList(fpsList, ret.fpsLimits)) {
            return ret;
        }
    }

    XMLElement *depth = deviceCfg->FirstChildElement("Depth16Supported");
    if (depth == nullptr) {
        ret.depthEnabled = false;
        ALOGI("%s: depth output is not enabled", __FUNCTION__);
    } else {
        ret.depthEnabled = depth->BoolAttribute("enabled", false);
    }

    if(ret.depthEnabled) {
        XMLElement *depthFpsList = deviceCfg->FirstChildElement("DepthFpsList");
        if (depthFpsList == nullptr) {
            ALOGW("%s: no depth fps list specified", __FUNCTION__);
        } else {
            if(!updateFpsList(depthFpsList, ret.depthFpsLimits)) {
                return ret;
            }
            limits.push_back(limit);
            row = row->NextSiblingElement("Limit");
        }
        ret.fpsLimits = limits;
    }

    XMLElement *minStreamSize = deviceCfg->FirstChildElement("MinimumStreamSize");
@@ -284,15 +283,48 @@ ExternalCameraConfig ExternalCameraConfig::loadFromCfg(const char* cfgPath) {
        ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__,
                limit.size.width, limit.size.height, limit.fpsUpperBound);
    }
    for (const auto& limit : ret.depthFpsLimits) {
        ALOGI("%s: depthFpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height,
              limit.fpsUpperBound);
    }
    ALOGI("%s: minStreamSize: %dx%d" , __FUNCTION__,
         ret.minStreamSize.width, ret.minStreamSize.height);
    return ret;
}

bool ExternalCameraConfig::updateFpsList(tinyxml2::XMLElement* fpsList,
        std::vector<FpsLimitation>& fpsLimits) {
    // Parse the <Limit width=.. height=.. fpsBound=..> children of fpsList
    // into fpsLimits. Entries must appear in strictly increasing size and
    // strictly decreasing fps-upper-bound order; on a violation this logs an
    // error and returns false, leaving fpsLimits untouched. Returns true on
    // success (an empty list is accepted and clears fpsLimits).
    using namespace tinyxml2;
    std::vector<FpsLimitation> limits;
    // Sentinel "previous" entry: the first limit must be larger than 0x0 and
    // bounded below 1000 fps.
    // BUGFIX: prevLimit was previously declared inside the loop and reset to
    // the sentinel on every iteration, so consecutive entries were never
    // actually compared and the ordering check was a no-op.
    FpsLimitation prevLimit{{0, 0}, 1000.0};
    XMLElement* row = fpsList->FirstChildElement("Limit");
    while (row != nullptr) {
        FpsLimitation limit;
        limit.size = {row->UnsignedAttribute("width", /*Default*/ 0),
                      row->UnsignedAttribute("height", /*Default*/ 0)};
        limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/ 1000.0);
        if (limit.size.width <= prevLimit.size.width ||
            limit.size.height <= prevLimit.size.height ||
            limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
            ALOGE(
                "%s: FPS limit list must have increasing size and decreasing fps!"
                " Prev %dx%d@%f, Current %dx%d@%f",
                __FUNCTION__, prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound,
                limit.size.width, limit.size.height, limit.fpsUpperBound);
            return false;
        }
        limits.push_back(limit);
        prevLimit = limit;  // Track the previously accepted entry.
        row = row->NextSiblingElement("Limit");
    }
    fpsLimits = limits;
    return true;
}

ExternalCameraConfig::ExternalCameraConfig() :
        maxJpegBufSize(kDefaultJpegBufSize),
        numVideoBuffers(kDefaultNumVideoBuffer),
        numStillBuffers(kDefaultNumStillBuffer) {
        numStillBuffers(kDefaultNumStillBuffer),
        depthEnabled(false) {
    fpsLimits.push_back({/*Size*/{ 640,  480}, /*FPS upper bound*/30.0});
    fpsLimits.push_back({/*Size*/{1280,  720}, /*FPS upper bound*/7.5});
    fpsLimits.push_back({/*Size*/{1920, 1080}, /*FPS upper bound*/5.0});
+21 −1
Original line number Diff line number Diff line
@@ -82,6 +82,9 @@ protected:
    void initSupportedFormatsLocked(int fd);

    status_t initCameraCharacteristics();
    // Init available capabilities keys
    status_t initAvailableCapabilities(
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
    // Init non-device dependent keys
    status_t initDefaultCharsKeys(::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
    // Init camera control chars keys. Caller still owns fd
@@ -91,13 +94,30 @@ protected:
    status_t initOutputCharsKeys(int fd,
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);

    // Helper function for initOutputCharskeys
    template <size_t SIZE>
    status_t initOutputCharskeysByFormat(
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*,
            uint32_t fourcc, const std::array<int, SIZE>& formats,
            int scaler_stream_config_tag,
            int stream_configuration, int min_frame_duration, int stall_duration);

    bool calculateMinFps(::android::hardware::camera::common::V1_0::helper::CameraMetadata*);

    static void getFrameRateList(int fd, double fpsUpperBound, SupportedV4L2Format* format);

    static void updateFpsBounds(int fd, CroppingType cropType,
            const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
            SupportedV4L2Format format,
            std::vector<SupportedV4L2Format>& outFmts);

    // Get candidate supported formats list of input cropping type.
    static std::vector<SupportedV4L2Format> getCandidateSupportedFormatsLocked(
            int fd, CroppingType cropType,
            const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
            const Size& minStreamSize);
            const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
            const Size& minStreamSize,
            bool depthEnabled);
    // Trim supported format list by the cropping type. Also sort output formats by width/height
    static void trimSupportedFormats(CroppingType cropType,
            /*inout*/std::vector<SupportedV4L2Format>* pFmts);
+9 −3
Original line number Diff line number Diff line
@@ -17,12 +17,13 @@
#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H
#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H

#include <android/hardware/graphics/mapper/2.0/IMapper.h>
#include <inttypes.h>
#include "utils/LightRefBase.h"
#include <mutex>
#include <vector>
#include <unordered_set>
#include <android/hardware/graphics/mapper/2.0/IMapper.h>
#include <vector>
#include "tinyxml2.h"  // XML parsing
#include "utils/LightRefBase.h"

using android::hardware::graphics::mapper::V2_0::IMapper;
using android::hardware::graphics::mapper::V2_0::YCbCrLayout;
@@ -71,17 +72,22 @@ struct ExternalCameraConfig {
    // Size of v4l2 buffer queue when streaming > kMaxVideoSize
    uint32_t numStillBuffers;

    // Indication that the device connected supports depth output
    bool depthEnabled;

    struct FpsLimitation {
        Size size;
        double fpsUpperBound;
    };
    std::vector<FpsLimitation> fpsLimits;
    std::vector<FpsLimitation> depthFpsLimits;

    // Minimum output stream size
    Size minStreamSize;

private:
    ExternalCameraConfig();
    static bool updateFpsList(tinyxml2::XMLElement* fpsList, std::vector<FpsLimitation>& fpsLimits);
};

} // common