Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit fdb43e49 authored by TreeHugger Robot's avatar TreeHugger Robot Committed by Android (Google) Code Review
Browse files

Merge "Merge qt-r1-dev-plus-aosp-without-vendor (5817612) into...

Merge "Merge qt-r1-dev-plus-aosp-without-vendor (5817612) into stage-aosp-master" into stage-aosp-master
parents ee14f823 cfaf477e
Loading
Loading
Loading
Loading
+3 −1
Original line number Diff line number Diff line
@@ -38,8 +38,10 @@ namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv
namespace.platform.isolated = true

namespace.platform.search.paths  = /system/${LIB}
namespace.platform.search.paths += /apex/com.android.runtime/${LIB}
namespace.platform.asan.search.paths  = /data/asan/system/${LIB}
namespace.platform.asan.search.paths +=           /system/${LIB}
namespace.platform.asan.search.paths += /apex/com.android.runtime/${LIB}

# /system/lib/libc.so, etc are symlinks to /apex/com.android.lib/lib/bionic/libc.so, etc.
# Add /apex/... path to the permitted paths because linker uses realpath(3)
+1 −1
Original line number Diff line number Diff line
@@ -138,6 +138,7 @@ class CameraDevice final : public RefBase {

  private:
    friend ACameraCaptureSession;
    friend ACameraDevice;

    camera_status_t checkCameraClosedOrErrorLocked() const;

@@ -387,7 +388,6 @@ struct ACameraDevice {
            mDevice(new android::acam::CameraDevice(id, cb, std::move(chars), this)) {}

    ~ACameraDevice();

    /*******************
     * NDK public APIs *
     *******************/
+55 −28
Original line number Diff line number Diff line
@@ -24,6 +24,7 @@
#include <algorithm>
#include <mutex>
#include <string>
#include <variant>
#include <vector>
#include <stdio.h>
#include <stdio.h>
@@ -49,6 +50,7 @@ static constexpr int kTestImageHeight = 480;
static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;

using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
using ConfiguredWindows = std::set<native_handle_t *>;

class CameraHelper {
   public:
@@ -60,9 +62,12 @@ class CameraHelper {
        const char* physicalCameraId;
        native_handle_t* anw;
    };
    int initCamera(native_handle_t* imgReaderAnw,

    // Retaining the error code in case the caller needs to analyze it.
    std::variant<int, ConfiguredWindows> initCamera(native_handle_t* imgReaderAnw,
            const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
            bool usePhysicalSettings) {
        ConfiguredWindows configuredWindows;
        if (imgReaderAnw == nullptr) {
            ALOGE("Cannot initialize camera before image reader get initialized.");
            return -1;
@@ -78,7 +83,7 @@ class CameraHelper {
        ret = ACameraManager_openCamera(mCameraManager, mCameraId, &mDeviceCb, &mDevice);
        if (ret != AMEDIA_OK || mDevice == nullptr) {
            ALOGE("Failed to open camera, ret=%d, mDevice=%p.", ret, mDevice);
            return -1;
            return ret;
        }

        // Create capture session
@@ -97,8 +102,9 @@ class CameraHelper {
            ALOGE("ACaptureSessionOutputContainer_add failed, ret=%d", ret);
            return ret;
        }

        configuredWindows.insert(mImgReaderAnw);
        std::vector<const char*> idPointerList;
        std::set<const native_handle_t*> physicalStreamMap;
        for (auto& physicalStream : physicalImgReaders) {
            ACaptureSessionOutput* sessionOutput = nullptr;
            ret = ACaptureSessionPhysicalOutput_create(physicalStream.anw,
@@ -112,21 +118,25 @@ class CameraHelper {
                ALOGE("ACaptureSessionOutputContainer_add failed, ret=%d", ret);
                return ret;
            }
            mExtraOutputs.push_back(sessionOutput);
            ret = ACameraDevice_isSessionConfigurationSupported(mDevice, mOutputs);
            if (ret != ACAMERA_OK && ret != ACAMERA_ERROR_UNSUPPORTED_OPERATION) {
                ALOGW("ACameraDevice_isSessionConfigurationSupported failed, ret=%d camera id %s",
                      ret, mCameraId);
                ACaptureSessionOutputContainer_remove(mOutputs, sessionOutput);
                ACaptureSessionOutput_free(sessionOutput);
                continue;
            }
            configuredWindows.insert(physicalStream.anw);
            // Assume that there is at most one physical stream per physical camera.
            mPhysicalCameraIds.push_back(physicalStream.physicalCameraId);
            idPointerList.push_back(physicalStream.physicalCameraId);
            physicalStreamMap.insert(physicalStream.anw);
            mSessionPhysicalOutputs.push_back(sessionOutput);
        }
        ACameraIdList cameraIdList;
        cameraIdList.numCameras = idPointerList.size();
        cameraIdList.cameraIds = idPointerList.data();

        ret = ACameraDevice_isSessionConfigurationSupported(mDevice, mOutputs);
        if (ret != ACAMERA_OK && ret != ACAMERA_ERROR_UNSUPPORTED_OPERATION) {
            ALOGE("ACameraDevice_isSessionConfigurationSupported failed, ret=%d", ret);
            return ret;
        }

        ret = ACameraDevice_createCaptureSession(mDevice, mOutputs, &mSessionCb, &mSession);
        if (ret != AMEDIA_OK) {
            ALOGE("ACameraDevice_createCaptureSession failed, ret=%d", ret);
@@ -157,6 +167,10 @@ class CameraHelper {
        }

        for (auto& physicalStream : physicalImgReaders) {
            if (physicalStreamMap.find(physicalStream.anw) == physicalStreamMap.end()) {
                ALOGI("Skipping physicalStream anw=%p", physicalStream.anw);
                continue;
            }
            ACameraOutputTarget* outputTarget = nullptr;
            ret = ACameraOutputTarget_create(physicalStream.anw, &outputTarget);
            if (ret != AMEDIA_OK) {
@@ -168,11 +182,11 @@ class CameraHelper {
                ALOGE("ACaptureRequest_addTarget failed, ret=%d", ret);
                return ret;
            }
            mReqExtraOutputs.push_back(outputTarget);
            mReqPhysicalOutputs.push_back(outputTarget);
        }

        mIsCameraReady = true;
        return 0;
        return configuredWindows;
    }


@@ -184,10 +198,10 @@ class CameraHelper {
            ACameraOutputTarget_free(mReqImgReaderOutput);
            mReqImgReaderOutput = nullptr;
        }
        for (auto& outputTarget : mReqExtraOutputs) {
        for (auto& outputTarget : mReqPhysicalOutputs) {
            ACameraOutputTarget_free(outputTarget);
        }
        mReqExtraOutputs.clear();
        mReqPhysicalOutputs.clear();
        if (mStillRequest) {
            ACaptureRequest_free(mStillRequest);
            mStillRequest = nullptr;
@@ -201,10 +215,10 @@ class CameraHelper {
            ACaptureSessionOutput_free(mImgReaderOutput);
            mImgReaderOutput = nullptr;
        }
        for (auto& extraOutput : mExtraOutputs) {
        for (auto& extraOutput : mSessionPhysicalOutputs) {
            ACaptureSessionOutput_free(extraOutput);
        }
        mExtraOutputs.clear();
        mSessionPhysicalOutputs.clear();
        if (mOutputs) {
            ACaptureSessionOutputContainer_free(mOutputs);
            mOutputs = nullptr;
@@ -262,13 +276,13 @@ class CameraHelper {
    // Capture session
    ACaptureSessionOutputContainer* mOutputs = nullptr;
    ACaptureSessionOutput* mImgReaderOutput = nullptr;
    std::vector<ACaptureSessionOutput*> mExtraOutputs;
    std::vector<ACaptureSessionOutput*> mSessionPhysicalOutputs;

    ACameraCaptureSession* mSession = nullptr;
    // Capture request
    ACaptureRequest* mStillRequest = nullptr;
    ACameraOutputTarget* mReqImgReaderOutput = nullptr;
    std::vector<ACameraOutputTarget*> mReqExtraOutputs;
    std::vector<ACameraOutputTarget*> mReqPhysicalOutputs;

    bool mIsCameraReady = false;
    const char* mCameraId;
@@ -581,9 +595,11 @@ class AImageReaderVendorTest : public ::testing::Test {
        }

        CameraHelper cameraHelper(id, mCameraManager);
        ret = cameraHelper.initCamera(testCase.getNativeWindow(),
                {}/*physicalImageReaders*/, false/*usePhysicalSettings*/);
        if (ret < 0) {
        std::variant<int, ConfiguredWindows> retInit =
                cameraHelper.initCamera(testCase.getNativeWindow(), {}/*physicalImageReaders*/,
                                        false/*usePhysicalSettings*/);
        int *retp = std::get_if<int>(&retInit);
        if (retp) {
            ALOGE("Unable to initialize camera helper");
            return false;
        }
@@ -751,10 +767,15 @@ class AImageReaderVendorTest : public ::testing::Test {
        physicalImgReaderInfo.push_back({physicalCameraIds[0], testCases[1]->getNativeWindow()});
        physicalImgReaderInfo.push_back({physicalCameraIds[1], testCases[2]->getNativeWindow()});

        int ret = cameraHelper.initCamera(testCases[0]->getNativeWindow(),
                physicalImgReaderInfo, usePhysicalSettings);
        ASSERT_EQ(ret, 0);

        std::variant<int, ConfiguredWindows> retInit =
                cameraHelper.initCamera(testCases[0]->getNativeWindow(), physicalImgReaderInfo,
                                        usePhysicalSettings);
        int *retp = std::get_if<int>(&retInit);
        ASSERT_EQ(retp, nullptr);
        ConfiguredWindows *configuredWindowsp = std::get_if<ConfiguredWindows>(&retInit);
        ASSERT_NE(configuredWindowsp, nullptr);
        ASSERT_LE(configuredWindowsp->size(), testCases.size());
        int ret = 0;
        if (!cameraHelper.isCameraReady()) {
            ALOGW("Camera is not ready after successful initialization. It's either due to camera "
                  "on board lacks BACKWARDS_COMPATIBLE capability or the device does not have "
@@ -776,9 +797,15 @@ class AImageReaderVendorTest : public ::testing::Test {
                break;
            }
        }
        ASSERT_EQ(testCases[0]->getAcquiredImageCount(), pictureCount);
        ASSERT_EQ(testCases[1]->getAcquiredImageCount(), pictureCount);
        ASSERT_EQ(testCases[2]->getAcquiredImageCount(), pictureCount);
        for(auto &testCase : testCases) {
            auto it = configuredWindowsp->find(testCase->getNativeWindow());
            if (it == configuredWindowsp->end()) {
                continue;
            }
            ALOGI("Testing window %p", testCase->getNativeWindow());
            ASSERT_EQ(testCase->getAcquiredImageCount(), pictureCount);
        }

        ASSERT_TRUE(cameraHelper.checkCallbacks(pictureCount));

        ACameraMetadata_free(staticMetadata);
+42 −26
Original line number Diff line number Diff line
@@ -157,7 +157,7 @@ C2SoftAacEnc::C2SoftAacEnc(
      mSentCodecSpecificData(false),
      mInputTimeSet(false),
      mInputSize(0),
      mInputTimeUs(0),
      mNextFrameTimestampUs(0),
      mSignalledError(false),
      mOutIndex(0u) {
}
@@ -183,7 +183,7 @@ c2_status_t C2SoftAacEnc::onStop() {
    mSentCodecSpecificData = false;
    mInputTimeSet = false;
    mInputSize = 0u;
    mInputTimeUs = 0;
    mNextFrameTimestampUs = 0;
    mSignalledError = false;
    return C2_OK;
}
@@ -201,7 +201,7 @@ c2_status_t C2SoftAacEnc::onFlush_sm() {
    mSentCodecSpecificData = false;
    mInputTimeSet = false;
    mInputSize = 0u;
    mInputTimeUs = 0;
    mNextFrameTimestampUs = 0;
    return C2_OK;
}

@@ -365,17 +365,18 @@ void C2SoftAacEnc::process(
        capacity = view.capacity();
    }
    if (!mInputTimeSet && capacity > 0) {
        mInputTimeUs = work->input.ordinal.timestamp;
        mNextFrameTimestampUs = work->input.ordinal.timestamp;
        mInputTimeSet = true;
    }

    size_t numFrames = (capacity + mInputSize + (eos ? mNumBytesPerInputFrame - 1 : 0))
            / mNumBytesPerInputFrame;
    ALOGV("capacity = %zu; mInputSize = %zu; numFrames = %zu mNumBytesPerInputFrame = %u",
          capacity, mInputSize, numFrames, mNumBytesPerInputFrame);
    ALOGV("capacity = %zu; mInputSize = %zu; numFrames = %zu "
          "mNumBytesPerInputFrame = %u inputTS = %lld",
          capacity, mInputSize, numFrames,
          mNumBytesPerInputFrame, work->input.ordinal.timestamp.peekll());

    std::shared_ptr<C2LinearBlock> block;
    std::shared_ptr<C2Buffer> buffer;
    std::unique_ptr<C2WriteView> wView;
    uint8_t *outPtr = temp;
    size_t outAvailable = 0u;
@@ -442,7 +443,11 @@ void C2SoftAacEnc::process(
        const std::shared_ptr<C2Buffer> mBuffer;
    };

    C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
    struct OutputBuffer {
        std::shared_ptr<C2Buffer> buffer;
        c2_cntr64_t timestampUs;
    };
    std::list<OutputBuffer> outputBuffers;

    while (encoderErr == AACENC_OK && inargs.numInSamples > 0) {
        if (numFrames && !block) {
@@ -473,29 +478,22 @@ void C2SoftAacEnc::process(
                                  &outargs);

        if (encoderErr == AACENC_OK) {
            if (buffer) {
                outOrdinal.frameIndex = mOutIndex++;
                outOrdinal.timestamp = mInputTimeUs;
                cloneAndSend(
                        inputIndex,
                        work,
                        FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
                buffer.reset();
            }

            if (outargs.numOutBytes > 0) {
                mInputSize = 0;
                int consumed = (capacity / sizeof(int16_t)) - inargs.numInSamples
                        + outargs.numInSamples;
                mInputTimeUs = work->input.ordinal.timestamp
                c2_cntr64_t currentFrameTimestampUs = mNextFrameTimestampUs;
                mNextFrameTimestampUs = work->input.ordinal.timestamp
                        + (consumed * 1000000ll / channelCount / sampleRate);
                buffer = createLinearBuffer(block, 0, outargs.numOutBytes);
                std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, outargs.numOutBytes);
#if defined(LOG_NDEBUG) && !LOG_NDEBUG
                hexdump(outPtr, std::min(outargs.numOutBytes, 256));
#endif
                outPtr = temp;
                outAvailable = 0;
                block.reset();

                outputBuffers.push_back({buffer, currentFrameTimestampUs});
            } else {
                mInputSize += outargs.numInSamples * sizeof(int16_t);
            }
@@ -506,8 +504,9 @@ void C2SoftAacEnc::process(
                inargs.numInSamples -= outargs.numInSamples;
            }
        }
        ALOGV("encoderErr = %d mInputSize = %zu inargs.numInSamples = %d, mInputTimeUs = %lld",
              encoderErr, mInputSize, inargs.numInSamples, mInputTimeUs.peekll());
        ALOGV("encoderErr = %d mInputSize = %zu "
              "inargs.numInSamples = %d, mNextFrameTimestampUs = %lld",
              encoderErr, mInputSize, inargs.numInSamples, mNextFrameTimestampUs.peekll());
    }

    if (eos && inBufferSize[0] > 0) {
@@ -542,10 +541,27 @@ void C2SoftAacEnc::process(
                           &outargs);
    }

    outOrdinal.frameIndex = mOutIndex++;
    outOrdinal.timestamp = mInputTimeUs;
    while (outputBuffers.size() > 1) {
        const OutputBuffer& front = outputBuffers.front();
        C2WorkOrdinalStruct ordinal = work->input.ordinal;
        ordinal.frameIndex = mOutIndex++;
        ordinal.timestamp = front.timestampUs;
        cloneAndSend(
                inputIndex,
                work,
                FillWork(C2FrameData::FLAG_INCOMPLETE, ordinal, front.buffer));
        outputBuffers.pop_front();
    }
    std::shared_ptr<C2Buffer> buffer;
    C2WorkOrdinalStruct ordinal = work->input.ordinal;
    ordinal.frameIndex = mOutIndex++;
    if (!outputBuffers.empty()) {
        ordinal.timestamp = outputBuffers.front().timestampUs;
        buffer = outputBuffers.front().buffer;
    }
    // Mark the end of frame
    FillWork((C2FrameData::flags_t)(eos ? C2FrameData::FLAG_END_OF_STREAM : 0),
             outOrdinal, buffer)(work);
             ordinal, buffer)(work);
}

c2_status_t C2SoftAacEnc::drain(
@@ -569,7 +585,7 @@ c2_status_t C2SoftAacEnc::drain(
    mSentCodecSpecificData = false;
    mInputTimeSet = false;
    mInputSize = 0u;
    mInputTimeUs = 0;
    mNextFrameTimestampUs = 0;

    // TODO: we don't have any pending work at this time to drain.
    return C2_OK;
+1 −1
Original line number Diff line number Diff line
@@ -56,7 +56,7 @@ private:
    bool mSentCodecSpecificData;
    bool mInputTimeSet;
    size_t mInputSize;
    c2_cntr64_t mInputTimeUs;
    c2_cntr64_t mNextFrameTimestampUs;

    bool mSignalledError;
    std::atomic_uint64_t mOutIndex;
Loading