Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e5c53f07 authored by Android Build Coastguard Worker's avatar Android Build Coastguard Worker
Browse files

Snap for 7992035 from ab37c680 to sc-v2-release

Change-Id: Ib48892247f06358b27692baf214a6f14162afdb0
parents c605a50c ab37c680
Loading
Loading
Loading
Loading
+1 −1
Original line number Diff line number Diff line
@@ -43,5 +43,5 @@ interface ICameraServiceProxy
     * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_180},
     * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_270}).
     */
    int getRotateAndCropOverride(String packageName, int lensFacing);
    int getRotateAndCropOverride(String packageName, int lensFacing, int userId);
}
+7 −4
Original line number Diff line number Diff line
@@ -3600,7 +3600,8 @@ typedef enum acamera_metadata_tag {
     * YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
     * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
     * <p>For applications targeting SDK version 31 or newer, if the mobile device declares to be
     * <a href="https://developer.android.com/reference/android/os/Build/VERSION_CODES/MEDIA_PERFORMANCE_CLASS.html">media performance class</a> S,
     * media performance class 12 or higher by setting
     * <a href="https://developer.android.com/reference/android/os/Build/VERSION_CODES/MEDIA_PERFORMANCE_CLASS.html">MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
     * the primary camera devices (first rear/front camera in the camera ID list) will not
     * support JPEG sizes smaller than 1080p. If the application configures a JPEG stream
     * smaller than 1080p, the camera device will round up the JPEG image size to at least
@@ -3618,9 +3619,11 @@ typedef enum acamera_metadata_tag {
     * YUV_420_888    | all output sizes available for FULL hardware level, up to the maximum video size | LIMITED        |
     * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
     * <p>For applications targeting SDK version 31 or newer, if the mobile device doesn't declare
     * to be media performance class S, or if the camera device isn't a primary rear/front
     * camera, the minimum required output stream configurations are the same as for applications
     * targeting SDK version older than 31.</p>
     * to be media performance class 12 or better by setting
     * <a href="https://developer.android.com/reference/android/os/Build/VERSION_CODES/MEDIA_PERFORMANCE_CLASS.html">MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
     * or if the camera device isn't a primary rear/front camera, the minimum required output
     * stream configurations are the same as for applications targeting SDK version older than
     * 31.</p>
     * <p>Refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES for additional
     * mandatory stream configurations on a per-capability basis.</p>
     * <p>Exception on 176x144 (QCIF) resolution: camera devices usually have a fixed capability for
+45 −37
Original line number Diff line number Diff line
@@ -1455,6 +1455,16 @@ status_t CCodecBufferChannel::requestInitialInputBuffers() {
    std::list<std::unique_ptr<C2Work>> flushedConfigs;
    mFlushedConfigs.lock()->swap(flushedConfigs);
    if (!flushedConfigs.empty()) {
        {
            Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
            PipelineWatcher::Clock::time_point now = PipelineWatcher::Clock::now();
            for (const std::unique_ptr<C2Work> &work : flushedConfigs) {
                watcher->onWorkQueued(
                        work->input.ordinal.frameIndex.peeku(),
                        std::vector(work->input.buffers),
                        now);
            }
        }
        err = mComponent->queue(&flushedConfigs);
        if (err != C2_OK) {
            ALOGW("[%s] Error while queueing a flushed config", mName);
@@ -1521,31 +1531,33 @@ void CCodecBufferChannel::release() {
    setDescrambler(nullptr);
}


void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
    ALOGV("[%s] flush", mName);
    std::vector<uint64_t> indices;
    std::list<std::unique_ptr<C2Work>> configs;
    mInput.lock()->lastFlushIndex = mFrameIndex.load(std::memory_order_relaxed);
    {
        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
        for (const std::unique_ptr<C2Work> &work : flushedWork) {
        indices.push_back(work->input.ordinal.frameIndex.peeku());
            uint64_t frameIndex = work->input.ordinal.frameIndex.peeku();
            if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
                watcher->onWorkDone(frameIndex);
                continue;
            }
            if (work->input.buffers.empty()
                    || work->input.buffers.front() == nullptr
                    || work->input.buffers.front()->data().linearBlocks().empty()) {
                ALOGD("[%s] no linear codec config data found", mName);
                watcher->onWorkDone(frameIndex);
                continue;
            }
            std::unique_ptr<C2Work> copy(new C2Work);
        copy->input.flags = C2FrameData::flags_t(work->input.flags | C2FrameData::FLAG_DROP_FRAME);
            copy->input.flags = C2FrameData::flags_t(
                    work->input.flags | C2FrameData::FLAG_DROP_FRAME);
            copy->input.ordinal = work->input.ordinal;
            copy->input.ordinal.frameIndex = mFrameIndex++;
        copy->input.buffers.insert(
                copy->input.buffers.begin(),
                work->input.buffers.begin(),
                work->input.buffers.end());
            for (size_t i = 0; i < work->input.buffers.size(); ++i) {
                copy->input.buffers.push_back(watcher->onInputBufferReleased(frameIndex, i));
            }
            for (const std::unique_ptr<C2Param> &param : work->input.configUpdate) {
                copy->input.configUpdate.push_back(C2Param::Copy(*param));
            }
@@ -1555,8 +1567,10 @@ void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushe
                    work->input.infoBuffers.end());
            copy->worklets.emplace_back(new C2Worklet);
            configs.push_back(std::move(copy));
            watcher->onWorkDone(frameIndex);
            ALOGV("[%s] stashed flushed codec config data", mName);
        }
    }
    mFlushedConfigs.lock()->swap(configs);
    {
        Mutexed<Input>::Locked input(mInput);
@@ -1570,12 +1584,6 @@ void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushe
            output->buffers->flushStash();
        }
    }
    {
        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
        for (uint64_t index : indices) {
            watcher->onWorkDone(index);
        }
    }
}

void CCodecBufferChannel::onWorkDone(
+9 −5
Original line number Diff line number Diff line
@@ -1835,8 +1835,10 @@ Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8&
        if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
            client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
        } else if (effectiveApiLevel == API_2) {
            client->setRotateAndCropOverride(CameraServiceProxyWrapper::getRotateAndCropOverride(
                    clientPackageName, facing));

          client->setRotateAndCropOverride(
              CameraServiceProxyWrapper::getRotateAndCropOverride(
                  clientPackageName, facing, multiuser_get_user_id(clientUid)));
        }

        // Set camera muting behavior
@@ -2219,7 +2221,9 @@ Status CameraService::notifyDisplayConfigurationChange() {
            if (basicClient.get() != nullptr && basicClient->canCastToApiClient(API_2)) {
              basicClient->setRotateAndCropOverride(
                  CameraServiceProxyWrapper::getRotateAndCropOverride(
                            basicClient->getPackageName(), basicClient->getCameraFacing()));
                      basicClient->getPackageName(),
                      basicClient->getCameraFacing(),
                      multiuser_get_user_id(basicClient->getClientUid())));
            }
        }
    }
+2 −1
Original line number Diff line number Diff line
@@ -78,7 +78,8 @@ status_t JpegProcessor::updateStream(const Parameters &params) {
    }

    // Find out buffer size for JPEG
    ssize_t maxJpegSize = device->getJpegBufferSize(params.pictureWidth, params.pictureHeight);
    ssize_t maxJpegSize = device->getJpegBufferSize(device->infoPhysical(String8("")),
            params.pictureWidth, params.pictureHeight);
    if (maxJpegSize <= 0) {
        ALOGE("%s: Camera %d: Jpeg buffer size (%zu) is invalid ",
                __FUNCTION__, mId, maxJpegSize);
Loading