Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit fe1192ec authored by TreeHugger Robot's avatar TreeHugger Robot Committed by Android (Google) Code Review
Browse files

Merge "External camera: add device config file"

parents 27bfe5ce 53f4cb14
Loading
Loading
Loading
Loading
+3 −1
Original line number Diff line number Diff line
@@ -69,7 +69,8 @@ cc_library_shared {
    vendor: true,
    srcs: [
        "ExternalCameraDevice.cpp",
        "ExternalCameraDeviceSession.cpp"
        "ExternalCameraDeviceSession.cpp",
        "ExternalCameraUtils.cpp",
    ],
    shared_libs: [
        "libhidlbase",
@@ -91,6 +92,7 @@ cc_library_shared {
        "libyuv",
        "libjpeg",
        "libexif",
        "libtinyxml2"
    ],
    static_libs: [
        "android.hardware.camera.common@1.0-helper",
+95 −15
Original line number Diff line number Diff line
@@ -15,9 +15,10 @@
 */

#define LOG_TAG "ExtCamDev@3.4"
#define LOG_NDEBUG 0
//#define LOG_NDEBUG 0
#include <log/log.h>

#include <algorithm>
#include <array>
#include <linux/videodev2.h>
#include "android-base/macros.h"
@@ -25,7 +26,6 @@
#include "../../3.2/default/include/convert.h"
#include "ExternalCameraDevice_3_4.h"


namespace android {
namespace hardware {
namespace camera {
@@ -42,16 +42,12 @@ const std::array<uint32_t, /*size*/1> kSupportedFourCCs {{
    V4L2_PIX_FMT_MJPEG
}}; // double braces required in C++11

// TODO: b/72261897
//       Define max size/fps this Android device can advertise (and streaming at reasonable speed)
//       Also make sure that can be done without editing source code

// TODO: b/72261675: make it dynamic since this affects memory usage
const int kMaxJpegSize = {5 * 1024 * 1024};  // 5MB
} // anonymous namespace

ExternalCameraDevice::ExternalCameraDevice(const std::string& cameraId) :
        mCameraId(cameraId) {
        mCameraId(cameraId),
        mCfg(ExternalCameraDeviceConfig::loadFromCfg()) {

    status_t ret = initCameraCharacteristics();
    if (ret != OK) {
        ALOGE("%s: init camera characteristics failed: errorno %d", __FUNCTION__, ret);
@@ -133,7 +129,8 @@ Return<void> ExternalCameraDevice::open(
    }

    session = new ExternalCameraDeviceSession(
            callback, mSupportedFormats, mCameraCharacteristics, std::move(fd));
            callback, mCfg, mSupportedFormats, mCroppingType,
            mCameraCharacteristics, std::move(fd));
    if (session == nullptr) {
        ALOGE("%s: camera device session allocation failed", __FUNCTION__);
        mLock.unlock();
@@ -283,7 +280,7 @@ status_t ExternalCameraDevice::initDefaultCharsKeys(
    UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes,
           ARRAY_SIZE(jpegAvailableThumbnailSizes));

    const int32_t jpegMaxSize = kMaxJpegSize;
    const int32_t jpegMaxSize = mCfg.maxJpegBufSize;
    UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    const uint8_t jpegQuality = 90;
@@ -692,7 +689,7 @@ status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
#undef UPDATE

void ExternalCameraDevice::getFrameRateList(
        int fd, SupportedV4L2Format* format) {
        int fd, float fpsUpperBound, SupportedV4L2Format* format) {
    format->frameRates.clear();

    v4l2_frmivalenum frameInterval {
@@ -709,7 +706,10 @@ void ExternalCameraDevice::getFrameRateList(
            if (frameInterval.discrete.numerator != 0) {
                float framerate = frameInterval.discrete.denominator /
                        static_cast<float>(frameInterval.discrete.numerator);
                ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f",
                if (framerate > fpsUpperBound) {
                    continue;
                }
                ALOGI("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f",
                    frameInterval.index,
                    frameInterval.pixel_format & 0xFF,
                    (frameInterval.pixel_format >> 8) & 0xFF,
@@ -732,6 +732,63 @@ void ExternalCameraDevice::getFrameRateList(
    }
}

// Determines whether frames from this camera should be cropped horizontally
// or vertically to reach the other supported aspect ratios, by comparing the
// aspect ratio of the largest supported size against the min/max aspect
// ratios across all supported sizes. May also prune entries from
// |pSortedFmts| (sorted ascending by size) whose aspect ratio cannot be
// reached by cropping the largest size in the chosen direction.
CroppingType ExternalCameraDevice::initCroppingType(
        /*inout*/std::vector<SupportedV4L2Format>* pSortedFmts) {
    std::vector<SupportedV4L2Format>& sortedFmts = *pSortedFmts;
    // Formats are sorted ascending, so the last entry is the largest size.
    const auto& maxSize = sortedFmts[sortedFmts.size() - 1];
    float maxSizeAr = ASPECT_RATIO(maxSize);
    // Scan for the extreme aspect ratios among all supported sizes.
    float minAr = kMaxAspectRatio;
    float maxAr = kMinAspectRatio;
    for (const auto& fmt : sortedFmts) {
        float ar = ASPECT_RATIO(fmt);
        if (ar < minAr) {
            minAr = ar;
        }
        if (ar > maxAr) {
            maxAr = ar;
        }
    }

    CroppingType ct = VERTICAL;
    if (isAspectRatioClose(maxSizeAr, maxAr)) {
        // Ex: 16:9 sensor, cropping horizontally to get to 4:3
        ct = HORIZONTAL;
    } else if (isAspectRatioClose(maxSizeAr, minAr)) {
        // Ex: 4:3 sensor, cropping vertically to get to 16:9
        ct = VERTICAL;
    } else {
        // The largest size sits between the two extremes: pick the crop
        // direction that covers the wider span of aspect ratios.
        ALOGI("%s: camera maxSizeAr %f is not close to minAr %f or maxAr %f",
                __FUNCTION__, maxSizeAr, minAr, maxAr);
        if ((maxSizeAr - minAr) < (maxAr - maxSizeAr)) {
            ct = VERTICAL;
        } else {
            ct = HORIZONTAL;
        }

        // Remove formats whose aspect ratio cannot be reached by cropping
        // the largest size in the chosen direction.
        std::vector<SupportedV4L2Format> out;
        for (const auto& fmt : sortedFmts) {
            float ar = ASPECT_RATIO(fmt);
            if (isAspectRatioClose(ar, maxSizeAr)) {
                out.push_back(fmt);
            } else if (ct == HORIZONTAL && ar < maxSizeAr) {
                out.push_back(fmt);
            } else if (ct == VERTICAL && ar > maxSizeAr) {
                out.push_back(fmt);
            } else {
                ALOGD("%s: size (%d,%d) is removed due to unable to crop %s from (%d,%d)",
                    __FUNCTION__, fmt.width, fmt.height,
                    ct == VERTICAL ? "vertically" : "horizontally",
                    maxSize.width, maxSize.height);
            }
        }
        sortedFmts = out;
    }
    ALOGI("%s: camera croppingType is %s", __FUNCTION__,
            ct == VERTICAL ? "VERTICAL" : "HORIZONTAL");
    return ct;
}

void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
    struct v4l2_fmtdesc fmtdesc {
        .index = 0,
@@ -755,7 +812,7 @@ void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
                for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0;
                        ++frameSize.index) {
                    if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                        ALOGD("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,
                        ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,
                            fmtdesc.pixelformat & 0xFF,
                            (fmtdesc.pixelformat >> 8) & 0xFF,
                            (fmtdesc.pixelformat >> 16) & 0xFF,
@@ -771,7 +828,20 @@ void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
                            .height = frameSize.discrete.height,
                            .fourcc = fmtdesc.pixelformat
                        };
                        getFrameRateList(fd, &format);

                        float fpsUpperBound = -1.0;
                        for (const auto& limit : mCfg.fpsLimits) {
                            if (format.width <= limit.size.width &&
                                    format.height <= limit.size.height) {
                                fpsUpperBound = limit.fpsUpperBound;
                                break;
                            }
                        }
                        if (fpsUpperBound < 0.f) {
                            continue;
                        }

                        getFrameRateList(fd, fpsUpperBound, &format);
                        if (!format.frameRates.empty()) {
                            mSupportedFormats.push_back(format);
                        }
@@ -781,6 +851,16 @@ void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
        }
        fmtdesc.index++;
    }

    std::sort(mSupportedFormats.begin(), mSupportedFormats.end(),
            [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
                if (a.width == b.width) {
                    return a.height < b.height;
                }
                return a.width < b.width;
            });

    mCroppingType = initCroppingType(&mSupportedFormats);
}

}  // namespace implementation
+15 −182
Original line number Diff line number Diff line
@@ -21,9 +21,7 @@
#include "ExternalCameraDeviceSession.h"

#include "android-base/macros.h"
#include "algorithm"
#include <utils/Timers.h>
#include <cmath>
#include <linux/videodev2.h>
#include <sync/sync.h>

@@ -40,98 +38,40 @@ namespace device {
namespace V3_4 {
namespace implementation {

namespace {
// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
static constexpr size_t kMetadataMsgQueueSize = 1 << 20 /* 1MB */;
const int ExternalCameraDeviceSession::kMaxProcessedStream;
const int ExternalCameraDeviceSession::kMaxStallStream;
const Size kMaxVideoSize = {1920, 1088}; // Maybe this should be programmable
const int kNumVideoBuffers = 4; // number of v4l2 buffers when streaming <= kMaxVideoSize
const int kNumStillBuffers = 2; // number of v4l2 buffers when streaming > kMaxVideoSize
static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;

const int kBadFramesAfterStreamOn = 1; // drop x frames after streamOn to get rid of some initial
                                       // bad frames. TODO: develop a better bad frame detection
                                       // method

// Aspect ratio is defined as width/height here and ExternalCameraDevice
// will guarantee all supported sizes has width >= height (so aspect ratio >= 1.0)
#define ASPECT_RATIO(sz) (static_cast<float>((sz).width) / (sz).height)
const float kMaxAspectRatio = std::numeric_limits<float>::max();
const float kMinAspectRatio = 1.f;
} // Anonymous namespace

// Static instances
const int ExternalCameraDeviceSession::kMaxProcessedStream;
const int ExternalCameraDeviceSession::kMaxStallStream;
HandleImporter ExternalCameraDeviceSession::sHandleImporter;

// Returns true when the two aspect ratios are near enough to be treated as
// the same. The 0.025 tolerance is small enough to still distinguish
// 4:3 (~1.33), 16:9 (~1.78) and 20:9 (2.0).
bool isAspectRatioClose(float ar1, float ar2) {
    constexpr float kAspectRatioMatchThres = 0.025f;
    const float diff = ar1 - ar2;
    return diff > -kAspectRatioMatchThres && diff < kAspectRatioMatchThres;
}

ExternalCameraDeviceSession::ExternalCameraDeviceSession(
        const sp<ICameraDeviceCallback>& callback,
        const std::vector<SupportedV4L2Format>& supportedFormats,
        const ExternalCameraDeviceConfig& cfg,
        const std::vector<SupportedV4L2Format>& sortedFormats,
        const CroppingType& croppingType,
        const common::V1_0::helper::CameraMetadata& chars,
        unique_fd v4l2Fd) :
        mCallback(callback),
        mCfg(cfg),
        mCameraCharacteristics(chars),
        mSupportedFormats(sortedFormats),
        mCroppingType(croppingType),
        mV4l2Fd(std::move(v4l2Fd)),
        mSupportedFormats(sortFormats(supportedFormats)),
        mCroppingType(initCroppingType(mSupportedFormats)),
        mOutputThread(new OutputThread(this, mCroppingType)),
        mMaxThumbResolution(getMaxThumbResolution()),
        mMaxJpegResolution(getMaxJpegResolution()) {
    mInitFail = initialize();
}

std::vector<SupportedV4L2Format> ExternalCameraDeviceSession::sortFormats(
            const std::vector<SupportedV4L2Format>& inFmts) {
    std::vector<SupportedV4L2Format> fmts = inFmts;
    std::sort(fmts.begin(), fmts.end(),
            [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
                if (a.width == b.width) {
                    return a.height < b.height;
                }
                return a.width < b.width;
            });
    return fmts;
}

// Determines whether frames should be cropped horizontally or vertically to
// reach the other supported aspect ratios, by comparing the aspect ratio of
// the largest supported size (last entry of the ascending-sorted |sortedFmts|)
// against the min/max aspect ratios across all supported sizes.
CroppingType ExternalCameraDeviceSession::initCroppingType(
        const std::vector<SupportedV4L2Format>& sortedFmts) {
    // Formats are sorted ascending, so the last entry is the largest size.
    const auto& maxSize = sortedFmts[sortedFmts.size() - 1];
    float maxSizeAr = ASPECT_RATIO(maxSize);
    // Scan for the extreme aspect ratios among all supported sizes.
    float minAr = kMaxAspectRatio;
    float maxAr = kMinAspectRatio;
    for (const auto& fmt : sortedFmts) {
        float ar = ASPECT_RATIO(fmt);
        if (ar < minAr) {
            minAr = ar;
        }
        if (ar > maxAr) {
            maxAr = ar;
        }
    }

    CroppingType ct = VERTICAL;
    if (isAspectRatioClose(maxSizeAr, maxAr)) {
        // Ex: 16:9 sensor, cropping horizontally to get to 4:3
        ct = HORIZONTAL;
    } else if (isAspectRatioClose(maxSizeAr, minAr)) {
        // Ex: 4:3 sensor, cropping vertically to get to 16:9
        ct = VERTICAL;
    } else {
        // The largest size sits between the two extremes: pick the crop
        // direction that covers the wider span of aspect ratios.
        ALOGI("%s: camera maxSizeAr %f is not close to minAr %f or maxAr %f",
                __FUNCTION__, maxSizeAr, minAr, maxAr);
        if ((maxSizeAr - minAr) < (maxAr - maxSizeAr)) {
            ct = VERTICAL;
        } else {
            ct = HORIZONTAL;
        }
    }
    ALOGI("%s: camera croppingType is %d", __FUNCTION__, ct);
    return ct;
}


bool ExternalCameraDeviceSession::initialize() {
    if (mV4l2Fd.get() < 0) {
        ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());
@@ -1996,8 +1936,8 @@ int ExternalCameraDeviceSession::configureV4l2StreamLocked(const SupportedV4L2Fo
        return BAD_VALUE;
    }

    uint32_t v4lBufferCount = (v4l2Fmt.width <= kMaxVideoSize.width &&
            v4l2Fmt.height <= kMaxVideoSize.height) ? kNumVideoBuffers : kNumStillBuffers;
    uint32_t v4lBufferCount = (fps >= kDefaultFps) ?
            mCfg.numVideoBuffers : mCfg.numStillBuffers;
    // VIDIOC_REQBUFS: create buffers
    v4l2_requestbuffers req_buffers{};
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
@@ -2538,113 +2478,6 @@ status_t ExternalCameraDeviceSession::fillCaptureResult(
#undef ARRAY_SIZE
#undef UPDATE

// Wraps a dequeued V4L2 buffer. |fd| is used only for the later mmap() and
// is not owned; the buffer index is kept so the frame can be re-enqueued.
V4L2Frame::V4L2Frame(
        uint32_t w, uint32_t h, uint32_t fourcc,
        int bufIdx, int fd, uint32_t dataSize) :
        mWidth(w), mHeight(h), mFourcc(fourcc),
        mBufferIndex(bufIdx), mFd(fd), mDataSize(dataSize) {}

// Lazily mmap()s the dequeued V4L2 buffer (read-only) on first call and
// caches the mapping; later calls just return the cached pointer/size.
// Returns 0 on success, -EINVAL on bad arguments or mmap failure.
int V4L2Frame::map(uint8_t** data, size_t* dataSize) {
    if (data == nullptr || dataSize == nullptr) {
        ALOGI("%s: V4L2 buffer map bad argument: data %p, dataSize %p",
                __FUNCTION__, data, dataSize);
        return -EINVAL;
    }

    Mutex::Autolock _l(mLock);
    if (!mMapped) {
        // First access: establish the mapping once and keep it for the
        // lifetime of this frame (torn down in unmap()/destructor).
        void* mapped = mmap(NULL, mDataSize, PROT_READ, MAP_SHARED, mFd, 0);
        if (mapped == MAP_FAILED) {
            ALOGE("%s: V4L2 buffer map failed: %s", __FUNCTION__, strerror(errno));
            return -EINVAL;
        }
        mData = static_cast<uint8_t*>(mapped);
        mMapped = true;
    }

    *data = mData;
    *dataSize = mDataSize;
    ALOGV("%s: V4L map FD %d, data %p size %zu", __FUNCTION__, mFd, mData, mDataSize);
    return 0;
}

// Releases the cached mmap() region, if any. Safe to call when nothing is
// mapped. Returns 0 on success (or no-op), -EINVAL if munmap fails.
int V4L2Frame::unmap() {
    Mutex::Autolock _l(mLock);
    if (!mMapped) {
        return 0;
    }
    ALOGV("%s: V4L unmap data %p size %zu", __FUNCTION__, mData, mDataSize);
    if (munmap(mData, mDataSize) != 0) {
        ALOGE("%s: V4L2 buffer unmap failed: %s", __FUNCTION__, strerror(errno));
        return -EINVAL;
    }
    mMapped = false;
    return 0;
}

// Ensures the mmap() region never outlives the frame object.
V4L2Frame::~V4L2Frame() {
    unmap();
}

// Creates a CPU-backed YUV420 (YU12) frame of |w| x |h|. The pixel buffer is
// not allocated until allocate() is called.
// Fix: dropped the stray ';' after the constructor body, which triggers
// -Wextra-semi / pedantic warnings.
AllocatedFrame::AllocatedFrame(
        uint32_t w, uint32_t h) :
        mWidth(w), mHeight(h), mFourcc(V4L2_PIX_FMT_YUV420) {}

AllocatedFrame::~AllocatedFrame() {}

// Allocates (or re-sizes) the backing YUV420 buffer and, if |out| is given,
// fills in the plane layout. Width/height must be even because YUV420 chroma
// is subsampled 2x2. Returns 0 on success, -EINVAL on odd dimensions.
int AllocatedFrame::allocate(YCbCrLayout* out) {
    if ((mWidth % 2) || (mHeight % 2)) {
        ALOGE("%s: bad dimension %dx%d (not multiple of 2)", __FUNCTION__, mWidth, mHeight);
        return -EINVAL;
    }

    // YUV420: full-res Y plane plus quarter-res Cb and Cr planes = 1.5 bytes
    // per pixel.
    uint32_t dataSize = mWidth * mHeight * 3 / 2; // YUV420
    if (mData.size() != dataSize) {
        mData.resize(dataSize);
    }

    if (out != nullptr) {
        // Planar layout: Y first, then Cb, then Cr; chroma stride is half the
        // luma stride and chromaStep 1 means the planes are not interleaved.
        out->y = mData.data();
        out->yStride = mWidth;
        uint8_t* cbStart = mData.data() + mWidth * mHeight;
        uint8_t* crStart = cbStart + mWidth * mHeight / 4;
        out->cb = cbStart;
        out->cr = crStart;
        out->cStride = mWidth / 2;
        out->chromaStep = 1;
    }
    return 0;
}

// Returns the plane layout of the whole frame, i.e. a "crop" covering the
// entire buffer.
int AllocatedFrame::getLayout(YCbCrLayout* out) {
    IMapper::Rect fullFrame = {0, 0,
            static_cast<int32_t>(mWidth),
            static_cast<int32_t>(mHeight)};
    return getCroppedLayout(fullFrame, out);
}

// Fills |out| with a plane layout describing the sub-rectangle |rect| of this
// YUV420 frame (no copy — pointers into the existing buffer). The rect must
// lie within the frame and have even left/top/width/height so it aligns with
// the 2x2 chroma subsampling. Returns 0 on success, -1 on bad input.
int AllocatedFrame::getCroppedLayout(const IMapper::Rect& rect, YCbCrLayout* out) {
    if (out == nullptr) {
        ALOGE("%s: null out", __FUNCTION__);
        return -1;
    }
    if ((rect.left + rect.width) > static_cast<int>(mWidth) ||
        (rect.top + rect.height) > static_cast<int>(mHeight) ||
            (rect.left % 2) || (rect.top % 2) || (rect.width % 2) || (rect.height % 2)) {
        ALOGE("%s: bad rect left %d top %d w %d h %d", __FUNCTION__,
                rect.left, rect.top, rect.width, rect.height);
        return -1;
    }

    // Luma: stride is the full frame width, so offset = top * stride + left.
    out->y = mData.data() + mWidth * rect.top + rect.left;
    out->yStride = mWidth;
    uint8_t* cbStart = mData.data() + mWidth * mHeight;
    uint8_t* crStart = cbStart + mWidth * mHeight / 4;
    // Chroma: stride is mWidth/2, so the row offset (top/2) * (mWidth/2)
    // simplifies to mWidth * top / 4; the column offset is left/2.
    out->cb = cbStart + mWidth * rect.top / 4 + rect.left / 2;
    out->cr = crStart + mWidth * rect.top / 4 + rect.left / 2;
    out->cStride = mWidth / 2;
    out->chromaStep = 1;
    return 0;
}

}  // namespace implementation
}  // namespace V3_4
}  // namespace device
+262 −0

File added.

Preview size limit exceeded, changes collapsed.

+7 −75
Original line number Diff line number Diff line
@@ -35,6 +35,7 @@
#include "utils/Mutex.h"
#include "utils/Thread.h"
#include "android-base/unique_fd.h"
#include "ExternalCameraUtils.h"

namespace android {
namespace hardware {
@@ -80,79 +81,12 @@ using ::android::sp;
using ::android::Mutex;
using ::android::base::unique_fd;

// TODO: put V4L2 related structs into separate header?
// One V4L2 frame size/pixel-format combination supported by the camera,
// together with every frame rate usable at that combination.
struct SupportedV4L2Format {
    uint32_t width;
    uint32_t height;
    uint32_t fourcc;
    // All supported frame rate for this w/h/fourcc combination
    std::vector<float> frameRates;
};

// A class provide access to a dequeued V4L2 frame buffer (mostly in MJPG format)
// Also contains necessary information to enqueue the buffer back to V4L2 buffer queue
class V4L2Frame : public virtual VirtualLightRefBase {
public:
    // |fd| is only borrowed for mmap; |bufIdx| identifies the V4L2 buffer
    // for later re-enqueueing.
    V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, uint32_t dataSize);
    ~V4L2Frame() override;
    const uint32_t mWidth;
    const uint32_t mHeight;
    const uint32_t mFourcc;
    const int mBufferIndex; // for later enqueue
    // map() lazily mmaps the buffer and returns the cached pointer/size;
    // unmap() releases the mapping. Both return 0 on success.
    int map(uint8_t** data, size_t* dataSize);
    int unmap();
private:
    Mutex mLock;  // guards the lazy-mapping state below
    const int mFd; // used for mmap but doesn't claim ownership
    const size_t mDataSize;
    uint8_t* mData = nullptr;
    bool  mMapped = false;
};

// A RAII class representing a CPU allocated YUV frame used as intermediate buffers
// when generating output images.
class AllocatedFrame : public virtual VirtualLightRefBase {
public:
    AllocatedFrame(uint32_t w, uint32_t h); // TODO: use Size?
    ~AllocatedFrame() override;
    const uint32_t mWidth;
    const uint32_t mHeight;
    const uint32_t mFourcc; // Only support YU12 format for now
    // allocate() sizes the backing buffer (and optionally reports the
    // layout); getLayout()/getCroppedLayout() return pointers into it.
    int allocate(YCbCrLayout* out = nullptr);
    int getLayout(YCbCrLayout* out);
    int getCroppedLayout(const IMapper::Rect&, YCbCrLayout* out); // return non-zero for bad input
private:
    Mutex mLock;
    std::vector<uint8_t> mData;  // YUV420 planar: Y, then Cb, then Cr
};

// Simple width/height pair used to describe stream and buffer dimensions.
struct Size {
    uint32_t width;
    uint32_t height;

    bool operator==(const Size& other) const {
        // Two sizes match only when both dimensions match.
        return width == other.width && height == other.height;
    }
};

// Hash functor so Size can be used as a key in unordered containers;
// combines the fields with the classic 31 * h + field scheme.
struct SizeHasher {
    size_t operator()(const Size& sz) const {
        size_t h = 1;
        h = h * 31 + sz.width;
        h = h * 31 + sz.height;
        return h;
    }
};

// Direction in which a larger sensor frame is cropped to reach a stream of a
// different aspect ratio: HORIZONTAL trims width, VERTICAL trims height.
enum CroppingType {
    HORIZONTAL = 0,
    VERTICAL = 1
};

struct ExternalCameraDeviceSession : public virtual RefBase {

    ExternalCameraDeviceSession(const sp<ICameraDeviceCallback>&,
            const std::vector<SupportedV4L2Format>& supportedFormats,
            const ExternalCameraDeviceConfig& cfg,
            const std::vector<SupportedV4L2Format>& sortedFormats,
            const CroppingType& croppingType,
            const common::V1_0::helper::CameraMetadata& chars,
            unique_fd v4l2Fd);
    virtual ~ExternalCameraDeviceSession();
@@ -238,9 +172,6 @@ protected:
    Status constructDefaultRequestSettingsRaw(RequestTemplate type,
            V3_2::CameraMetadata *outMetadata);

    static std::vector<SupportedV4L2Format> sortFormats(
            const std::vector<SupportedV4L2Format>&);
    static CroppingType initCroppingType(const std::vector<SupportedV4L2Format>&);
    bool initialize();
    Status initStatus() const;
    status_t initDefaultRequests();
@@ -346,7 +277,10 @@ protected:

    mutable Mutex mLock; // Protect all private members except otherwise noted
    const sp<ICameraDeviceCallback> mCallback;
    const ExternalCameraDeviceConfig mCfg;
    const common::V1_0::helper::CameraMetadata mCameraCharacteristics;
    const std::vector<SupportedV4L2Format> mSupportedFormats;
    const CroppingType mCroppingType;
    unique_fd mV4l2Fd;
    // device is closed either
    //    - closed by user
@@ -366,8 +300,6 @@ protected:
    std::condition_variable mV4L2BufferReturned;
    size_t mNumDequeuedV4l2Buffers = 0;

    const std::vector<SupportedV4L2Format> mSupportedFormats;
    const CroppingType mCroppingType;
    sp<OutputThread> mOutputThread;

    // Stream ID -> Camera3Stream cache
Loading