Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit cccae839 authored by TreeHugger Robot's avatar TreeHugger Robot Committed by Android (Google) Code Review
Browse files

Merge "Y16 format enablement for external provider"

parents 7883157f eed0040e
Loading
Loading
Loading
Loading
+168 −69
Original line number Diff line number Diff line
@@ -38,9 +38,8 @@ namespace {
// Other formats to consider in the future:
// * V4L2_PIX_FMT_YVU420 (== YV12)
// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
const std::array<uint32_t, /*size*/1> kSupportedFourCCs {{
    V4L2_PIX_FMT_MJPEG
}}; // double braces required in C++11
const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
    {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}};  // double braces required in C++11

constexpr int MAX_RETRY = 5; // Allow retry v4l2 open failures a few times.
constexpr int OPEN_RETRY_SLEEP_US = 100000; // 100ms * MAX_RETRY = 0.5 seconds
@@ -231,6 +230,13 @@ status_t ExternalCameraDevice::initCameraCharacteristics() {
            mCameraCharacteristics.clear();
            return ret;
        }

        ret = initAvailableCapabilities(&mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init available capabilities key failed: errorno %d", __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }
    }
    return OK;
}
@@ -244,6 +250,39 @@ do { \
  }                                                \
} while (0)

status_t ExternalCameraDevice::initAvailableCapabilities(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    // Populate ANDROID_REQUEST_AVAILABLE_CAPABILITIES from the V4L2 formats the
    // device enumerated: MJPEG implies a BACKWARD_COMPATIBLE color camera,
    // Z16 implies a DEPTH_OUTPUT camera. Returns UNKNOWN_ERROR if the
    // supported-format list has not been initialized (or is empty).
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Supported formats list is empty", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    bool hasDepth = false;
    bool hasColor = false;
    for (const auto& fmt : mSupportedFormats) {
        switch (fmt.fourcc) {
            case V4L2_PIX_FMT_Z16:
                hasDepth = true;
                break;
            case V4L2_PIX_FMT_MJPEG:
                hasColor = true;
                break;
            default:
                // Print the actual fourcc characters so unexpected formats are
                // diagnosable, consistent with initOutputCharsKeys().
                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
                      fmt.fourcc & 0xFF, (fmt.fourcc >> 8) & 0xFF,
                      (fmt.fourcc >> 16) & 0xFF, (fmt.fourcc >> 24) & 0xFF);
        }
    }

    std::vector<uint8_t> availableCapabilities;
    if (hasDepth) {
        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
    }
    if (hasColor) {
        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    }
    // Only publish the tag when at least one capability was detected; writing
    // a zero-length entry would be meaningless metadata.
    if (!availableCapabilities.empty()) {
        UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(),
               availableCapabilities.size());
    }

    return OK;
}

status_t ExternalCameraDevice::initDefaultCharsKeys(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
@@ -330,12 +369,6 @@ status_t ExternalCameraDevice::initDefaultCharsKeys(
           &noiseReductionMode, 1);
    UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);

    // android.request
    const uint8_t availableCapabilities[] = {
        ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE};
    UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities,
           ARRAY_SIZE(availableCapabilities));

    const int32_t partialResultCount = 1;
    UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1);

@@ -544,9 +577,11 @@ status_t ExternalCameraDevice::initCameraControlsCharsKeys(int,
    return OK;
}

status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    initSupportedFormatsLocked(fd);
template <size_t SIZE>
status_t ExternalCameraDevice::initOutputCharskeysByFormat(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
        uint32_t fourcc, const std::array<int, SIZE>& halFormats,
        int streamConfigTag, int streamConfiguration, int minFrameDuration, int stallDuration) {
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
@@ -555,22 +590,17 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
    std::vector<int32_t> streamConfigurations;
    std::vector<int64_t> minFrameDurations;
    std::vector<int64_t> stallDurations;
    int32_t maxFps = std::numeric_limits<int32_t>::min();
    int32_t minFps = std::numeric_limits<int32_t>::max();
    std::set<int32_t> framerates;

    std::array<int, /*size*/3> halFormats{{
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_YCbCr_420_888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};

    for (const auto& supportedFormat : mSupportedFormats) {
        if (supportedFormat.fourcc != fourcc) {
            // Skip 4CCs not meant for the halFormats
            continue;
        }
        for (const auto& format : halFormats) {
            streamConfigurations.push_back(format);
            streamConfigurations.push_back(supportedFormat.width);
            streamConfigurations.push_back(supportedFormat.height);
            streamConfigurations.push_back(
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            streamConfigurations.push_back(streamConfigTag);
        }

        int64_t minFrameDuration = std::numeric_limits<int64_t>::max();
@@ -582,14 +612,6 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
            if (frameDuration < minFrameDuration) {
                minFrameDuration = frameDuration;
            }
            int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
            if (minFps > frameRateInt) {
                minFps = frameRateInt;
            }
            if (maxFps < frameRateInt) {
                maxFps = frameRateInt;
            }
            framerates.insert(frameRateInt);
        }

        for (const auto& format : halFormats) {
@@ -613,6 +635,30 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
        }
    }

    UPDATE(streamConfiguration, streamConfigurations.data(), streamConfigurations.size());

    UPDATE(minFrameDuration, minFrameDurations.data(), minFrameDurations.size());

    UPDATE(stallDuration, stallDurations.data(), stallDurations.size());

    return true;
}

bool ExternalCameraDevice::calculateMinFps(
    ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    std::set<int32_t> framerates;
    int32_t minFps = std::numeric_limits<int32_t>::max();

    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& fr : supportedFormat.frameRates) {
            int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
            if (minFps > frameRateInt) {
                minFps = frameRateInt;
            }
            framerates.insert(frameRateInt);
        }
    }

    std::vector<int32_t> fpsRanges;
    // FPS ranges
    for (const auto& framerate : framerates) {
@@ -626,16 +672,59 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(),
           fpsRanges.size());

    UPDATE(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
           streamConfigurations.data(), streamConfigurations.size());
    UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);

    UPDATE(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
           minFrameDurations.data(), minFrameDurations.size());
    return true;
}

    UPDATE(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, stallDurations.data(),
           stallDurations.size());
status_t ExternalCameraDevice::initOutputCharsKeys(
    int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    initSupportedFormatsLocked(fd);
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);
    bool hasDepth = false;
    bool hasColor = false;

    // For V4L2_PIX_FMT_Z16
    std::array<int, /*size*/ 1> halDepthFormats{{HAL_PIXEL_FORMAT_Y16}};
    // For V4L2_PIX_FMT_MJPEG
    std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
                                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};

    for (const auto& supportedFormat : mSupportedFormats) {
        switch (supportedFormat.fourcc) {
            case V4L2_PIX_FMT_Z16:
                hasDepth = true;
                break;
            case V4L2_PIX_FMT_MJPEG:
                hasColor = true;
                break;
            default:
                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
                      supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF,
                      (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF);
        }
    }

    if (hasDepth) {
        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_Z16, halDepthFormats,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
    }
    if (hasColor) {
        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
    }

    calculateMinFps(metadata);

    SupportedV4L2Format maximumFormat {.width = 0, .height = 0};
    for (const auto& supportedFormat : mSupportedFormats) {
@@ -758,11 +847,12 @@ void ExternalCameraDevice::trimSupportedFormats(
    sortedFmts = out;
}

std::vector<SupportedV4L2Format>
ExternalCameraDevice::getCandidateSupportedFormatsLocked(
std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
    int fd, CroppingType cropType,
    const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
        const Size& minStreamSize) {
    const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
    const Size& minStreamSize,
    bool depthEnabled) {
    std::vector<SupportedV4L2Format> outFmts;
    struct v4l2_fmtdesc fmtdesc {
        .index = 0,
@@ -808,6 +898,25 @@ ExternalCameraDevice::getCandidateSupportedFormatsLocked(
                            .fourcc = fmtdesc.pixelformat
                        };

                        if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
                            updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
                        } else {
                            updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
                        }
                    }
                }
            }
        }
        fmtdesc.index++;
    }
    trimSupportedFormats(cropType, &outFmts);
    return outFmts;
}

void ExternalCameraDevice::updateFpsBounds(
    int fd, CroppingType cropType,
    const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits, SupportedV4L2Format format,
    std::vector<SupportedV4L2Format>& outFmts) {
    double fpsUpperBound = -1.0;
    for (const auto& limit : fpsLimits) {
        if (cropType == VERTICAL) {
@@ -821,10 +930,9 @@ ExternalCameraDevice::getCandidateSupportedFormatsLocked(
                break;
            }
        }

    }
    if (fpsUpperBound < 0.f) {
                            continue;
        return;
    }

    getFrameRateList(fd, fpsUpperBound, &format);
@@ -832,21 +940,12 @@ ExternalCameraDevice::getCandidateSupportedFormatsLocked(
        outFmts.push_back(format);
    }
}
                }
            }
        }
        fmtdesc.index++;
    }
    trimSupportedFormats(cropType, &outFmts);
    return outFmts;
}

void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {

    std::vector<SupportedV4L2Format> horizontalFmts =
            getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits, mCfg.minStreamSize);
    std::vector<SupportedV4L2Format> verticalFmts =
            getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits, mCfg.minStreamSize);
    std::vector<SupportedV4L2Format> horizontalFmts = getCandidateSupportedFormatsLocked(
        fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);
    std::vector<SupportedV4L2Format> verticalFmts = getCandidateSupportedFormatsLocked(
        fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);

    size_t horiSize = horizontalFmts.size();
    size_t vertSize = verticalFmts.size();
+47 −32
Original line number Diff line number Diff line
@@ -1819,7 +1819,7 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
        return false;
    };

    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) {
    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
        return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
                req->frameIn->mFourcc & 0xFF,
                (req->frameIn->mFourcc >> 8) & 0xFF,
@@ -1844,17 +1844,13 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
    }

    // TODO: in some special case maybe we can decode jpg directly to gralloc output?
    if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
        ATRACE_BEGIN("MJPGtoI420");
    res = libyuv::MJPGToI420(
            inData, inDataSize,
            static_cast<uint8_t*>(mYu12FrameLayout.y),
            mYu12FrameLayout.yStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cb),
            mYu12FrameLayout.cStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cr),
            mYu12FrameLayout.cStride,
            mYu12Frame->mWidth, mYu12Frame->mHeight,
            mYu12Frame->mWidth, mYu12Frame->mHeight);
        int res = libyuv::MJPGToI420(
            inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y), mYu12FrameLayout.yStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cb), mYu12FrameLayout.cStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cr), mYu12FrameLayout.cStride,
            mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth, mYu12Frame->mHeight);
        ATRACE_END();

        if (res != 0) {
@@ -1868,6 +1864,7 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
            signalRequestDone();
            return true;
        }
    }

    ATRACE_BEGIN("Wait for BufferRequest done");
    res = waitForBufferRequestDone(&req->buffers);
@@ -1910,6 +1907,16 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
                          __FUNCTION__, ret);
                }
            } break;
            case PixelFormat::Y16: {
                void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize);

                std::memcpy(outLayout, inData, inDataSize);

                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
                if (relFence >= 0) {
                    halBuf.acquireFence = relFence;
                }
            } break;
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: {
                IMapper::Rect outRect {0, 0,
@@ -2164,7 +2171,8 @@ void ExternalCameraDeviceSession::updateBufferCaches(const hidl_vec<BufferCache>
}

bool ExternalCameraDeviceSession::isSupported(const Stream& stream,
        const std::vector<SupportedV4L2Format>& supportedFormats) {
        const std::vector<SupportedV4L2Format>& supportedFormats,
        const ExternalCameraConfig& devCfg) {
    int32_t ds = static_cast<int32_t>(stream.dataSpace);
    PixelFormat fmt = stream.format;
    uint32_t width = stream.width;
@@ -2181,11 +2189,6 @@ bool ExternalCameraDeviceSession::isSupported(const Stream& stream,
        return false;
    }

    if (ds & Dataspace::DEPTH) {
        ALOGI("%s: does not support depth output", __FUNCTION__);
        return false;
    }

    switch (fmt) {
        case PixelFormat::BLOB:
            if (ds != static_cast<int32_t>(Dataspace::V0_JFIF)) {
@@ -2199,6 +2202,16 @@ bool ExternalCameraDeviceSession::isSupported(const Stream& stream,
            // TODO: check what dataspace we can support here.
            // intentional no-ops.
            break;
        case PixelFormat::Y16:
            if (!devCfg.depthEnabled) {
                ALOGI("%s: Depth is not Enabled", __FUNCTION__);
                return false;
            }
            if (!(ds & Dataspace::DEPTH)) {
                ALOGI("%s: Y16 supports only dataSpace DEPTH", __FUNCTION__);
                return false;
            }
            break;
        default:
            ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
            return false;
@@ -2544,7 +2557,8 @@ void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp<V4L2Frame>& frame) {

Status ExternalCameraDeviceSession::isStreamCombinationSupported(
        const V3_2::StreamConfiguration& config,
        const std::vector<SupportedV4L2Format>& supportedFormats) {
        const std::vector<SupportedV4L2Format>& supportedFormats,
        const ExternalCameraConfig& devCfg) {
    if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) {
        ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode);
        return Status::ILLEGAL_ARGUMENT;
@@ -2559,7 +2573,7 @@ Status ExternalCameraDeviceSession::isStreamCombinationSupported(
    int numStallStream = 0;
    for (const auto& stream : config.streams) {
        // Check if the format/width/height combo is supported
        if (!isSupported(stream, supportedFormats)) {
        if (!isSupported(stream, supportedFormats, devCfg)) {
            return Status::ILLEGAL_ARGUMENT;
        }
        if (stream.format == PixelFormat::BLOB) {
@@ -2590,7 +2604,7 @@ Status ExternalCameraDeviceSession::configureStreams(
        uint32_t blobBufferSize) {
    ATRACE_CALL();

    Status status = isStreamCombinationSupported(config, mSupportedFormats);
    Status status = isStreamCombinationSupported(config, mSupportedFormats, mCfg);
    if (status != Status::OK) {
        return status;
    }
@@ -2744,6 +2758,7 @@ Status ExternalCameraDeviceSession::configureStreams(
            case PixelFormat::BLOB:
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: // Used by SurfaceTexture
            case PixelFormat::Y16:
                // No override
                out->streams[i].v3_2.overrideFormat = config.streams[i].format;
                break;
+52 −20
Original line number Diff line number Diff line
@@ -21,7 +21,6 @@
#include <sys/mman.h>
#include <linux/videodev2.h>
#include "ExternalCameraUtils.h"
#include "tinyxml2.h" // XML parsing

namespace android {
namespace hardware {
@@ -245,28 +244,28 @@ ExternalCameraConfig ExternalCameraConfig::loadFromCfg(const char* cfgPath) {
    if (fpsList == nullptr) {
        ALOGI("%s: no fps list specified", __FUNCTION__);
    } else {
        std::vector<FpsLimitation> limits;
        XMLElement *row = fpsList->FirstChildElement("Limit");
        while (row != nullptr) {
            FpsLimitation prevLimit {{0, 0}, 1000.0};
            FpsLimitation limit;
            limit.size = {
                row->UnsignedAttribute("width", /*Default*/0),
                row->UnsignedAttribute("height", /*Default*/0)};
            limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/1000.0);
            if (limit.size.width <= prevLimit.size.width ||
                    limit.size.height <= prevLimit.size.height ||
                    limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
                ALOGE("%s: FPS limit list must have increasing size and decreasing fps!"
                        " Prev %dx%d@%f, Current %dx%d@%f", __FUNCTION__,
                        prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound,
                        limit.size.width, limit.size.height, limit.fpsUpperBound);
        if (!updateFpsList(fpsList, ret.fpsLimits)) {
            return ret;
        }
    }

    XMLElement *depth = deviceCfg->FirstChildElement("Depth16Supported");
    if (depth == nullptr) {
        ret.depthEnabled = false;
        ALOGI("%s: depth output is not enabled", __FUNCTION__);
    } else {
        ret.depthEnabled = depth->BoolAttribute("enabled", false);
    }

    if(ret.depthEnabled) {
        XMLElement *depthFpsList = deviceCfg->FirstChildElement("DepthFpsList");
        if (depthFpsList == nullptr) {
            ALOGW("%s: no depth fps list specified", __FUNCTION__);
        } else {
            if(!updateFpsList(depthFpsList, ret.depthFpsLimits)) {
                return ret;
            }
            limits.push_back(limit);
            row = row->NextSiblingElement("Limit");
        }
        ret.fpsLimits = limits;
    }

    XMLElement *minStreamSize = deviceCfg->FirstChildElement("MinimumStreamSize");
@@ -293,15 +292,48 @@ ExternalCameraConfig ExternalCameraConfig::loadFromCfg(const char* cfgPath) {
        ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__,
                limit.size.width, limit.size.height, limit.fpsUpperBound);
    }
    for (const auto& limit : ret.depthFpsLimits) {
        ALOGI("%s: depthFpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height,
              limit.fpsUpperBound);
    }
    ALOGI("%s: minStreamSize: %dx%d" , __FUNCTION__,
         ret.minStreamSize.width, ret.minStreamSize.height);
    return ret;
}

bool ExternalCameraConfig::updateFpsList(tinyxml2::XMLElement* fpsList,
        std::vector<FpsLimitation>& fpsLimits) {
    // Parses the <Limit width=".." height=".." fpsBound=".."> children of
    // |fpsList|. Rows must have strictly increasing sizes and strictly
    // decreasing fps upper bounds. On success replaces |fpsLimits| with the
    // parsed list and returns true; on a malformed row logs an error, leaves
    // |fpsLimits| untouched, and returns false.
    using namespace tinyxml2;
    std::vector<FpsLimitation> limits;
    // Sentinel the first row is validated against: any size larger than 0x0
    // with an fps bound below 1000 is accepted.
    FpsLimitation prevLimit{{0, 0}, 1000.0};
    XMLElement* row = fpsList->FirstChildElement("Limit");
    while (row != nullptr) {
        FpsLimitation limit;
        limit.size = {row->UnsignedAttribute("width", /*Default*/ 0),
                      row->UnsignedAttribute("height", /*Default*/ 0)};
        limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/ 1000.0);
        if (limit.size.width <= prevLimit.size.width ||
            limit.size.height <= prevLimit.size.height ||
            limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
            ALOGE(
                "%s: FPS limit list must have increasing size and decreasing fps!"
                " Prev %dx%d@%f, Current %dx%d@%f",
                __FUNCTION__, prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound,
                limit.size.width, limit.size.height, limit.fpsUpperBound);
            return false;
        }
        limits.push_back(limit);
        // Bug fix: remember the row just accepted. Previously |prevLimit| was
        // re-initialized to the sentinel on every iteration, so rows were only
        // ever compared against {0x0, 1000.0} and a non-monotonic list (e.g. a
        // second row smaller than the first) was silently accepted.
        prevLimit = limit;
        row = row->NextSiblingElement("Limit");
    }
    fpsLimits = limits;
    return true;
}

ExternalCameraConfig::ExternalCameraConfig() :
        maxJpegBufSize(kDefaultJpegBufSize),
        numVideoBuffers(kDefaultNumVideoBuffer),
        numStillBuffers(kDefaultNumStillBuffer),
        depthEnabled(false),
        orientation(kDefaultOrientation) {
    fpsLimits.push_back({/*Size*/{ 640,  480}, /*FPS upper bound*/30.0});
    fpsLimits.push_back({/*Size*/{1280,  720}, /*FPS upper bound*/7.5});
+4 −2
Original line number Diff line number Diff line
@@ -194,7 +194,8 @@ protected:
    int v4l2StreamOffLocked();
    int setV4l2FpsLocked(double fps);
    static Status isStreamCombinationSupported(const V3_2::StreamConfiguration& config,
            const std::vector<SupportedV4L2Format>& supportedFormats);
            const std::vector<SupportedV4L2Format>& supportedFormats,
            const ExternalCameraConfig& devCfg);

    // TODO: change to unique_ptr for better tracking
    sp<V4L2Frame> dequeueV4l2FrameLocked(/*out*/nsecs_t* shutterTs); // Called with mLock hold
@@ -202,7 +203,8 @@ protected:

    // Check if input Stream is one of supported stream setting on this device
    static bool isSupported(const Stream& stream,
            const std::vector<SupportedV4L2Format>& supportedFormats);
            const std::vector<SupportedV4L2Format>& supportedFormats,
            const ExternalCameraConfig& cfg);

    // Validate and import request's output buffers and acquire fence
    virtual Status importRequestLocked(
+21 −1
Original line number Diff line number Diff line
@@ -104,6 +104,9 @@ protected:

    // Calls into virtual member function. Do not use it in constructor
    status_t initCameraCharacteristics();
    // Init available capabilities keys
    status_t initAvailableCapabilities(
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
    // Init non-device dependent keys
    virtual status_t initDefaultCharsKeys(
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
@@ -114,13 +117,30 @@ protected:
    status_t initOutputCharsKeys(int fd,
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);

    // Helper function for initOutputCharskeys
    template <size_t SIZE>
    status_t initOutputCharskeysByFormat(
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*,
            uint32_t fourcc, const std::array<int, SIZE>& formats,
            int scaler_stream_config_tag,
            int stream_configuration, int min_frame_duration, int stall_duration);

    bool calculateMinFps(::android::hardware::camera::common::V1_0::helper::CameraMetadata*);

    static void getFrameRateList(int fd, double fpsUpperBound, SupportedV4L2Format* format);

    static void updateFpsBounds(int fd, CroppingType cropType,
            const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
            SupportedV4L2Format format,
            std::vector<SupportedV4L2Format>& outFmts);

    // Get candidate supported formats list of input cropping type.
    static std::vector<SupportedV4L2Format> getCandidateSupportedFormatsLocked(
            int fd, CroppingType cropType,
            const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
            const Size& minStreamSize);
            const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
            const Size& minStreamSize,
            bool depthEnabled);
    // Trim supported format list by the cropping type. Also sort output formats by width/height
    static void trimSupportedFormats(CroppingType cropType,
            /*inout*/std::vector<SupportedV4L2Format>* pFmts);
Loading