Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 4df59e77 authored by Chong Zhang; committed by Android (Google) Code Review
Browse files

Merge "Decode one row of tiles at a time for image that has tiles" into pi-dev

parents d79fe0de 0c1407f7
Loading
Loading
Loading
Loading
+4 −0
Original line number Diff line number Diff line
@@ -37,9 +37,11 @@ public:
    // will calculate frame buffer size if |hasData| is set to true.
    VideoFrame(uint32_t width, uint32_t height,
            uint32_t displayWidth, uint32_t displayHeight,
            uint32_t tileWidth, uint32_t tileHeight,
            uint32_t angle, uint32_t bpp, bool hasData, size_t iccSize):
        mWidth(width), mHeight(height),
        mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
        mTileWidth(tileWidth), mTileHeight(tileHeight),
        mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
        mSize(hasData ? (bpp * width * height) : 0),
        mIccSize(iccSize), mReserved(0) {
@@ -74,6 +76,8 @@ public:
    uint32_t mHeight;          // Decoded image height before rotation
    uint32_t mDisplayWidth;    // Display width before rotation
    uint32_t mDisplayHeight;   // Display height before rotation
    uint32_t mTileWidth;       // Tile width (0 if image doesn't have grid)
    uint32_t mTileHeight;      // Tile height (0 if image doesn't have grid)
    int32_t  mRotationAngle;   // Rotation angle, clockwise, should be multiple of 90
    uint32_t mBytesPerPixel;   // Number of bytes per pixel
    uint32_t mRowBytes;        // Number of bytes per row before rotation
+134 −19
Original line number Diff line number Diff line
@@ -271,17 +271,43 @@ status_t HeifDataSource::getSize(off64_t* size) {

/////////////////////////////////////////////////////////////////////////

// Worker thread that runs HeifDecoderImpl::decodeAsync() in the background,
// decoding remaining slices while the client consumes scanlines.
struct HeifDecoderImpl::DecodeThread : public Thread {
    explicit DecodeThread(HeifDecoderImpl *decoder) : mDecoder(decoder) {}

private:
    // Non-owning back-pointer; the decoder joins this thread in its
    // destructor, so mDecoder outlives the thread loop.
    HeifDecoderImpl* mDecoder;

    bool threadLoop();

    DISALLOW_EVIL_CONSTRUCTORS(DecodeThread);
};

// Thread entry point: performs one pass of the asynchronous slice decode.
// decodeAsync() returns false, so Thread does not invoke the loop again.
bool HeifDecoderImpl::DecodeThread::threadLoop() {
    const bool runAgain = mDecoder->decodeAsync();
    return runAgain;
}

/////////////////////////////////////////////////////////////////////////

// Constructor. Fix: the rendered diff contained both the pre-change line
// "mHasVideo(false) {" and the post-change "mHasVideo(false)," — a duplicate
// initializer plus a premature brace that made this invalid C++. Only the
// post-change initializer list is kept.
HeifDecoderImpl::HeifDecoderImpl() :
    // output color format should always be set via setOutputColor(), in case
    // it's not, default to HAL_PIXEL_FORMAT_RGB_565.
    mOutputColor(HAL_PIXEL_FORMAT_RGB_565),
    mCurScanline(0),
    mWidth(0),
    mHeight(0),
    mFrameDecoded(false),
    mHasImage(false),
    mHasVideo(false),
    // Slice-decoding state: defaults describe whole-frame (single slice)
    // mode until init() detects a tiled image large enough to slice.
    mAvailableLines(0),
    mNumSlices(1),
    mSliceHeight(0),
    mAsyncDecodeDone(false) {
}

HeifDecoderImpl::~HeifDecoderImpl() {
    // If slice decoding started a worker thread, wait for it to exit before
    // tearing down the decoder state it points back into.
    if (mThread != nullptr) {
        mThread->join();
    }
}

bool HeifDecoderImpl::init(HeifStream* stream, HeifFrameInfo* frameInfo) {
@@ -310,22 +336,23 @@ bool HeifDecoderImpl::init(HeifStream* stream, HeifFrameInfo* frameInfo) {

    mHasImage = hasImage && !strcasecmp(hasImage, "yes");
    mHasVideo = hasVideo && !strcasecmp(hasVideo, "yes");
    sp<IMemory> sharedMem;
    if (mHasImage) {
        // image index < 0 to retrieve primary image
        mFrameMemory = mRetriever->getImageAtIndex(
        sharedMem = mRetriever->getImageAtIndex(
                -1, mOutputColor, true /*metaOnly*/);
    } else if (mHasVideo) {
        mFrameMemory = mRetriever->getFrameAtTime(0,
        sharedMem = mRetriever->getFrameAtTime(0,
                MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
                mOutputColor, true /*metaOnly*/);
    }

    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
    if (sharedMem == nullptr || sharedMem->pointer() == nullptr) {
        ALOGE("getFrameAtTime: videoFrame is a nullptr");
        return false;
    }

    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
    VideoFrame* videoFrame = static_cast<VideoFrame*>(sharedMem->pointer());

    ALOGV("Meta dimension %dx%d, display %dx%d, angle %d, iccSize %d",
            videoFrame->mWidth,
@@ -344,6 +371,14 @@ bool HeifDecoderImpl::init(HeifStream* stream, HeifFrameInfo* frameInfo) {
                videoFrame->mIccSize,
                videoFrame->getFlattenedIccData());
    }
    mWidth = videoFrame->mWidth;
    mHeight = videoFrame->mHeight;
    if (mHasImage && videoFrame->mTileHeight >= 512 && mWidth >= 3000 && mHeight >= 2000 ) {
        // Try decoding in slices only if the image has tiles and is big enough.
        mSliceHeight = videoFrame->mTileHeight;
        mNumSlices = (videoFrame->mHeight + mSliceHeight - 1) / mSliceHeight;
        ALOGV("mSliceHeight %u, mNumSlices %zu", mSliceHeight, mNumSlices);
    }
    return true;
}

@@ -376,6 +411,36 @@ bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
    return false;
}

// Decode-thread body (invoked via DecodeThread::threadLoop). Decodes the
// remaining tile rows one slice at a time; slice 0 was already decoded
// synchronously by decode() before the thread was started, hence i = 1.
bool HeifDecoderImpl::decodeAsync() {
    for (size_t i = 1; i < mNumSlices; i++) {
        ALOGV("decodeAsync(): decoding slice %zu", i);
        size_t top = i * mSliceHeight;
        size_t bottom = (i + 1) * mSliceHeight;
        if (bottom > mHeight) {
            bottom = mHeight;
        }
        // Fetch the next row of tiles; args are (index, color, left, top,
        // right, bottom), index < 0 selects the primary image.
        sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
                -1, mOutputColor, 0, top, mWidth, bottom);
        {
            Mutex::Autolock autolock(mLock);

            if (frameMemory == nullptr || frameMemory->pointer() == nullptr) {
                // Decode failed: mark done so getScanline() stops waiting,
                // then abandon the remaining slices.
                mAsyncDecodeDone = true;
                mScanlineReady.signal();
                break;
            }
            mFrameMemory = frameMemory;
            mAvailableLines = bottom;
            ALOGV("decodeAsync(): available lines %zu", mAvailableLines);
            mScanlineReady.signal();
        }
    }
    // NOTE(review): on full success mAsyncDecodeDone stays false; readers are
    // unblocked because mAvailableLines has reached mHeight — confirm this is
    // the intended completion signal.
    // Aggressively clear to avoid holding on to resources
    mRetriever.clear();
    mDataSource.clear();
    // Returning false tells Thread not to run the loop again.
    return false;
}

bool HeifDecoderImpl::decode(HeifFrameInfo* frameInfo) {
    // reset scanline pointer
    mCurScanline = 0;
@@ -384,6 +449,47 @@ bool HeifDecoderImpl::decode(HeifFrameInfo* frameInfo) {
        return true;
    }

    // See if we want to decode in slices to allow client to start
    // scanline processing in parallel with decode. If this fails
    // we fallback to decoding the full frame.
    if (mHasImage && mNumSlices > 1) {
        // get first slice and metadata
        sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
                -1, mOutputColor, 0, 0, mWidth, mSliceHeight);

        if (frameMemory == nullptr || frameMemory->pointer() == nullptr) {
            ALOGE("decode: metadata is a nullptr");
            return false;
        }

        VideoFrame* videoFrame = static_cast<VideoFrame*>(frameMemory->pointer());

        if (frameInfo != nullptr) {
            frameInfo->set(
                    videoFrame->mWidth,
                    videoFrame->mHeight,
                    videoFrame->mRotationAngle,
                    videoFrame->mBytesPerPixel,
                    videoFrame->mIccSize,
                    videoFrame->getFlattenedIccData());
        }

        mFrameMemory = frameMemory;
        mAvailableLines = mSliceHeight;
        mThread = new DecodeThread(this);
        if (mThread->run("HeifDecode", ANDROID_PRIORITY_FOREGROUND) == OK) {
            mFrameDecoded = true;
            return true;
        }

        // Fallback to decode without slicing
        mThread.clear();
        mNumSlices = 1;
        mSliceHeight = 0;
        mAvailableLines = 0;
        mFrameMemory.clear();
    }

    if (mHasImage) {
        // image index < 0 to retrieve primary image
        mFrameMemory = mRetriever->getImageAtIndex(-1, mOutputColor);
@@ -393,14 +499,14 @@ bool HeifDecoderImpl::decode(HeifFrameInfo* frameInfo) {
    }

    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
        ALOGE("getFrameAtTime: videoFrame is a nullptr");
        ALOGE("decode: videoFrame is a nullptr");
        return false;
    }

    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
    if (videoFrame->mSize == 0 ||
            mFrameMemory->size() < videoFrame->getFlattenedSize()) {
        ALOGE("getFrameAtTime: videoFrame size is invalid");
        ALOGE("decode: videoFrame size is invalid");
        return false;
    }

@@ -424,36 +530,45 @@ bool HeifDecoderImpl::decode(HeifFrameInfo* frameInfo) {
    }
    mFrameDecoded = true;

    // Aggressive clear to avoid holding on to resources
    // Aggressively clear to avoid holding on to resources
    mRetriever.clear();
    mDataSource.clear();
    return true;
}

bool HeifDecoderImpl::getScanline(uint8_t* dst) {
bool HeifDecoderImpl::getScanlineInner(uint8_t* dst) {
    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
        return false;
    }
    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
    if (mCurScanline >= videoFrame->mHeight) {
        ALOGE("no more scanline available");
        return false;
    }
    uint8_t* src = videoFrame->getFlattenedData() + videoFrame->mRowBytes * mCurScanline++;
    memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mWidth);
    return true;
}

size_t HeifDecoderImpl::skipScanlines(size_t count) {
    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
        return 0;
bool HeifDecoderImpl::getScanline(uint8_t* dst) {
    if (mCurScanline >= mHeight) {
        ALOGE("no more scanline available");
        return false;
    }

    if (mNumSlices > 1) {
        Mutex::Autolock autolock(mLock);

        while (!mAsyncDecodeDone && mCurScanline >= mAvailableLines) {
            mScanlineReady.wait(mLock);
        }
        return (mCurScanline < mAvailableLines) ? getScanlineInner(dst) : false;
    }

    return getScanlineInner(dst);
}
    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());

// Advances the scanline cursor by |count| rows, clamped to the image height.
// Returns the number of rows actually skipped (0 if already at the end).
// Fixes: (1) the rendered diff contained both the pre-change clamp against
// videoFrame->mHeight and the post-change clamp against mHeight — only the
// post-change lines are kept; (2) the saved cursor uses size_t to match
// mCurScanline instead of a narrowing uint32_t copy.
size_t HeifDecoderImpl::skipScanlines(size_t count) {
    size_t oldScanline = mCurScanline;
    mCurScanline += count;
    if (mCurScanline > mHeight) {
        mCurScanline = mHeight;
    }
    return (mCurScanline > oldScanline) ? (mCurScanline - oldScanline) : 0;
}
+18 −0
Original line number Diff line number Diff line
@@ -19,6 +19,8 @@

#include "include/HeifDecoderAPI.h"
#include <system/graphics.h>
#include <utils/Condition.h>
#include <utils/Mutex.h>
#include <utils/RefBase.h>

namespace android {
@@ -49,14 +51,30 @@ public:
    size_t skipScanlines(size_t count) override;

private:
    struct DecodeThread;

    sp<IDataSource> mDataSource;
    sp<MediaMetadataRetriever> mRetriever;
    sp<IMemory> mFrameMemory;
    android_pixel_format_t mOutputColor;
    size_t mCurScanline;
    uint32_t mWidth;
    uint32_t mHeight;
    bool mFrameDecoded;
    bool mHasImage;
    bool mHasVideo;

    // Slice decoding only
    Mutex mLock;
    Condition mScanlineReady;
    sp<DecodeThread> mThread;
    size_t mAvailableLines;
    size_t mNumSlices;
    uint32_t mSliceHeight;
    bool mAsyncDecodeDone;

    bool decodeAsync();
    bool getScanlineInner(uint8_t* dst);
};

} // namespace android
+53 −0
Original line number Diff line number Diff line
@@ -69,6 +69,7 @@ enum {
    SET_DATA_SOURCE_CALLBACK,
    GET_FRAME_AT_TIME,
    GET_IMAGE_AT_INDEX,
    GET_IMAGE_RECT_AT_INDEX,
    GET_FRAME_AT_INDEX,
    EXTRACT_ALBUM_ART,
    EXTRACT_METADATA,
@@ -187,6 +188,30 @@ public:
        return interface_cast<IMemory>(reply.readStrongBinder());
    }

    // Client-side (Bp) proxy: marshals a cropped-image request to the remote
    // MediaMetadataRetriever. index < 0 selects the primary image; the rect
    // is (left, top, right, bottom). Returns NULL on any remote failure.
    // NOTE: the parcel write order below must match the read order in the
    // GET_IMAGE_RECT_AT_INDEX case of BnMediaMetadataRetriever::onTransact.
    sp<IMemory> getImageRectAtIndex(
            int index, int colorFormat, int left, int top, int right, int bottom)
    {
        ALOGV("getImageRectAtIndex: index %d, colorFormat(%d) rect {%d, %d, %d, %d}",
                index, colorFormat, left, top, right, bottom);
        Parcel data, reply;
        data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
        data.writeInt32(index);
        data.writeInt32(colorFormat);
        data.writeInt32(left);
        data.writeInt32(top);
        data.writeInt32(right);
        data.writeInt32(bottom);
#ifndef DISABLE_GROUP_SCHEDULE_HACK
        sendSchedPolicy(data);
#endif
        remote()->transact(GET_IMAGE_RECT_AT_INDEX, data, &reply);
        status_t ret = reply.readInt32();
        if (ret != NO_ERROR) {
            // Remote reported failure; no binder follows in the reply.
            return NULL;
        }
        return interface_cast<IMemory>(reply.readStrongBinder());
    }

    status_t getFrameAtIndex(std::vector<sp<IMemory> > *frames,
            int frameIndex, int numFrames, int colorFormat, bool metaOnly)
    {
@@ -375,6 +400,34 @@ status_t BnMediaMetadataRetriever::onTransact(
#endif
            return NO_ERROR;
        } break;

        // Server-side (Bn) handler for getImageRectAtIndex(). The read order
        // below must match the write order in the Bp proxy.
        case GET_IMAGE_RECT_AT_INDEX: {
            CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
            int index = data.readInt32();
            int colorFormat = data.readInt32();
            int left = data.readInt32();
            int top = data.readInt32();
            int right = data.readInt32();
            int bottom = data.readInt32();
            ALOGV("getImageRectAtIndex: index(%d), colorFormat(%d), rect {%d, %d, %d, %d}",
                    index, colorFormat, left, top, right, bottom);
#ifndef DISABLE_GROUP_SCHEDULE_HACK
            setSchedPolicy(data);
#endif
            sp<IMemory> bitmap = getImageRectAtIndex(
                    index, colorFormat, left, top, right, bottom);
            if (bitmap != 0) {  // Don't send NULL across the binder interface
                reply->writeInt32(NO_ERROR);
                reply->writeStrongBinder(IInterface::asBinder(bitmap));
            } else {
                reply->writeInt32(UNKNOWN_ERROR);
            }
#ifndef DISABLE_GROUP_SCHEDULE_HACK
            restoreSchedPolicy();
#endif
            return NO_ERROR;
        } break;

        case GET_FRAME_AT_INDEX: {
            CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
            int frameIndex = data.readInt32();
+2 −0
Original line number Diff line number Diff line
@@ -46,6 +46,8 @@ public:
            int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
    virtual sp<IMemory>     getImageAtIndex(
            int index, int colorFormat, bool metaOnly, bool thumbnail) = 0;
    virtual sp<IMemory>     getImageRectAtIndex(
            int index, int colorFormat, int left, int top, int right, int bottom) = 0;
    virtual status_t        getFrameAtIndex(
            std::vector<sp<IMemory> > *frames,
            int frameIndex, int numFrames, int colorFormat, bool metaOnly) = 0;
Loading