
Commit 9b770726 authored by Treehugger Robot, committed by Gerrit Code Review

Merge changes from topics "nnapi-canonical-burst", "nnapi-updatable-drivers"

* changes:
  Relocate NN burst utility to ExecutionBurstUtils
  Relocate ExecutionBurst* classes to NN util code
  Implement partial canonical Burst in NN util code
  Introduce canonical IBurst object in NNAPI -- hal
  Add isUpdatable to NNAPI canonical IDevice -- hal
parents 89d91f80 8fc48961
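
Taken together, the changes listed above introduce a canonical nn::IBurst interface and wire the V1_0 HIDL utility code into it: PreparedModel gains a configureExecutionBurst() method, and the new Burst class adapts a prepared model into a burst object. Below is a minimal, hypothetical call-flow sketch; the prepared model is assumed to come from nn::IDevice::prepareModel(), which is outside this diff, and the helper name runThroughBurst is made up for illustration.

#include <android-base/logging.h>
#include <nnapi/IBurst.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/Types.h>

namespace nn = ::android::nn;

void runThroughBurst(const nn::SharedPreparedModel& preparedModel, const nn::Request& request) {
    // New in this change: obtain a canonical burst object from the prepared model.
    auto burst = preparedModel->configureExecutionBurst();
    if (!burst.has_value()) {
        LOG(ERROR) << "configureExecutionBurst failed: " << burst.error().message;
        return;
    }
    // Execute through nn::IBurst. For the V1_0 adapter in this commit, this forwards
    // to IPreparedModel::execute with empty deadline and loop-timeout durations.
    auto result = burst.value()->execute(request, nn::MeasureTiming::NO);
    if (!result.has_value()) {
        LOG(ERROR) << "burst execution failed: " << result.error().message;
        return;
    }
    LOG(INFO) << "burst execution reported " << result->first.size() << " output shapes";
}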
+55 −0
/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_BURST_H
#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_BURST_H

#include <nnapi/IBurst.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>

#include <memory>
#include <optional>
#include <utility>

// See hardware/interfaces/neuralnetworks/utils/README.md for more information on HIDL interface
// lifetimes across processes and for protecting asynchronous calls across HIDL.

namespace android::hardware::neuralnetworks::V1_0::utils {

// Class that adapts nn::IPreparedModel to nn::IBurst.
class Burst final : public nn::IBurst {
    struct PrivateConstructorTag {};

  public:
    static nn::GeneralResult<std::shared_ptr<const Burst>> create(
            nn::SharedPreparedModel preparedModel);

    Burst(PrivateConstructorTag tag, nn::SharedPreparedModel preparedModel);

    OptionalCacheHold cacheMemory(const nn::Memory& memory) const override;

    nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
            const nn::Request& request, nn::MeasureTiming measure) const override;

  private:
    const nn::SharedPreparedModel kPreparedModel;
};

}  // namespace android::hardware::neuralnetworks::V1_0::utils

#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_BURST_H
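
A side note on the PrivateConstructorTag idiom used above (and by PreparedModel below): the constructor is public so that std::make_shared can call it, yet only code that can name the private tag type, i.e. the class's own create() factory, can actually construct an instance. A generic sketch of the idiom, using a made-up Widget class:

#include <memory>

class Widget {
    struct PrivateConstructorTag {};  // private: callers outside Widget cannot name this type

  public:
    static std::shared_ptr<const Widget> create(int value) {
        // Argument validation would go here before construction, as Burst::create does
        // with its null check.
        return std::make_shared<const Widget>(PrivateConstructorTag{}, value);
    }

    Widget(PrivateConstructorTag /*tag*/, int value) : kValue(value) {}

  private:
    const int kValue;
};

// auto w = Widget::create(42);                      // OK
// Widget bad(Widget::PrivateConstructorTag{}, 42);  // does not compile: tag is inaccessible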
+1 −0
@@ -52,6 +52,7 @@ class Device final : public nn::IDevice {
    const std::string& getVersionString() const override;
    nn::Version getFeatureLevel() const override;
    nn::DeviceType getType() const override;
+    bool isUpdatable() const override;
    const std::vector<nn::Extension>& getSupportedExtensions() const override;
    const nn::Capabilities& getCapabilities() const override;
    std::pair<uint32_t, uint32_t> getNumberOfCacheFilesNeeded() const override;
+4 −1
@@ -35,7 +35,8 @@
namespace android::hardware::neuralnetworks::V1_0::utils {

// Class that adapts V1_0::IPreparedModel to nn::IPreparedModel.
-class PreparedModel final : public nn::IPreparedModel {
+class PreparedModel final : public nn::IPreparedModel,
+                            public std::enable_shared_from_this<PreparedModel> {
    struct PrivateConstructorTag {};

  public:
@@ -56,6 +57,8 @@ class PreparedModel final : public nn::IPreparedModel {
            const nn::OptionalDuration& loopTimeoutDuration,
            const nn::OptionalDuration& timeoutDurationAfterFence) const override;

+    nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
+
    std::any getUnderlyingResource() const override;

  private:
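
The std::enable_shared_from_this base added above exists because Burst::create (new in this commit) takes a nn::SharedPreparedModel by value, so configureExecutionBurst() must be able to hand out a shared_ptr to the prepared model it is called on. The matching PreparedModel.cpp change is not part of the hunks shown here, but given that base class the implementation is presumably along these lines:

nn::GeneralResult<nn::SharedBurst> PreparedModel::configureExecutionBurst() const {
    // shared_from_this() in a const member yields std::shared_ptr<const PreparedModel>,
    // which converts to nn::SharedPreparedModel (a shared_ptr to const nn::IPreparedModel).
    return Burst::create(shared_from_this());
}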
+55 −0
/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Burst.h"

#include <android-base/logging.h>
#include <nnapi/IBurst.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>

#include <memory>
#include <optional>
#include <utility>

namespace android::hardware::neuralnetworks::V1_0::utils {

nn::GeneralResult<std::shared_ptr<const Burst>> Burst::create(
        nn::SharedPreparedModel preparedModel) {
    if (preparedModel == nullptr) {
        return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
               << "V1_0::utils::Burst::create must have non-null preparedModel";
    }

    return std::make_shared<const Burst>(PrivateConstructorTag{}, std::move(preparedModel));
}

Burst::Burst(PrivateConstructorTag /*tag*/, nn::SharedPreparedModel preparedModel)
    : kPreparedModel(std::move(preparedModel)) {
    CHECK(kPreparedModel != nullptr);
}

Burst::OptionalCacheHold Burst::cacheMemory(const nn::Memory& /*memory*/) const {
    return nullptr;
}

nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
        const nn::Request& request, nn::MeasureTiming measure) const {
    return kPreparedModel->execute(request, measure, {}, {});
}

}  // namespace android::hardware::neuralnetworks::V1_0::utils
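
Two implementation details above are worth calling out: cacheMemory() returns a null hold, meaning this V1_0 adapter performs no memory caching, and execute() forwards to the prepared model with empty deadline and loop-timeout durations, so a burst execution behaves like a plain one-off execution. Callers can still use the cache-hold pattern unchanged; a hypothetical caller-side sketch follows, where the burst, memory, and request are assumed to come from elsewhere and executeWithReuseHint is a made-up name.

#include <nnapi/IBurst.h>
#include <nnapi/Types.h>

namespace nn = ::android::nn;

void executeWithReuseHint(const nn::SharedBurst& burst, const nn::Memory& memory,
                          const nn::Request& request) {
    // Hint that `memory` will be reused across executions on this burst. The returned
    // hold keeps any driver-side cache entry alive while it is in scope; with the
    // V1_0 adapter above it is simply null, so this is a no-op.
    const auto hold = burst->cacheMemory(memory);  // nn::IBurst::OptionalCacheHold

    auto result = burst->execute(request, nn::MeasureTiming::NO);
    // ... check `result`, then issue further executions that reuse `memory` ...
}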
+4 −0
@@ -106,6 +106,10 @@ nn::DeviceType Device::getType() const {
    return nn::DeviceType::OTHER;
}

+bool Device::isUpdatable() const {
+    return false;
+}
+
const std::vector<nn::Extension>& Device::getSupportedExtensions() const {
    return kExtensions;
}
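
The V1_0 adapter hard-codes false here; the new isUpdatable() accessor on the canonical nn::IDevice lets callers tell updatable drivers apart from legacy ones like this. A hypothetical caller-side filter, assuming a list of canonical devices obtained elsewhere:

#include <nnapi/IDevice.h>
#include <nnapi/Types.h>

#include <vector>

namespace nn = ::android::nn;

// Keep only devices that report themselves as updatable; V1_0 devices wrapped by the
// adapter above always return false and are filtered out.
std::vector<nn::SharedDevice> updatableDevices(const std::vector<nn::SharedDevice>& devices) {
    std::vector<nn::SharedDevice> updatable;
    for (const auto& device : devices) {
        if (device->isUpdatable()) {
            updatable.push_back(device);
        }
    }
    return updatable;
}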