neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h  +0 −1

@@ -37,7 +37,6 @@ namespace android::hardware::neuralnetworks::V1_2::utils {
 nn::GeneralResult<std::string> initVersionString(V1_2::IDevice* device);
 nn::GeneralResult<nn::DeviceType> initDeviceType(V1_2::IDevice* device);
 nn::GeneralResult<std::vector<nn::Extension>> initExtensions(V1_2::IDevice* device);
-nn::GeneralResult<nn::Capabilities> initCapabilities(V1_2::IDevice* device);
 nn::GeneralResult<std::pair<uint32_t, uint32_t>> initNumberOfCacheFilesNeeded(
         V1_2::IDevice* device);
neuralnetworks/1.2/utils/src/Device.cpp  +24 −21

@@ -42,6 +42,30 @@
 #include <vector>

 namespace android::hardware::neuralnetworks::V1_2::utils {
+namespace {
+
+nn::GeneralResult<nn::Capabilities> initCapabilities(V1_2::IDevice* device) {
+    CHECK(device != nullptr);
+
+    nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
+                                                 << "uninitialized";
+    const auto cb = [&result](V1_0::ErrorStatus status, const Capabilities& capabilities) {
+        if (status != V1_0::ErrorStatus::NONE) {
+            const auto canonical =
+                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
+            result = NN_ERROR(canonical) << "getCapabilities_1_2 failed with " << toString(status);
+        } else {
+            result = validatedConvertToCanonical(capabilities);
+        }
+    };
+
+    const auto ret = device->getCapabilities_1_2(cb);
+    NN_TRY(hal::utils::handleTransportError(ret));
+
+    return result;
+}
+
+}  // namespace

 nn::GeneralResult<std::string> initVersionString(V1_2::IDevice* device) {
     CHECK(device != nullptr);

@@ -106,27 +130,6 @@ nn::GeneralResult<std::vector<nn::Extension>> initExtensions(V1_2::IDevice* devi
     return result;
 }

-nn::GeneralResult<nn::Capabilities> initCapabilities(V1_2::IDevice* device) {
-    CHECK(device != nullptr);
-
-    nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
-                                                 << "uninitialized";
-    const auto cb = [&result](V1_0::ErrorStatus status, const Capabilities& capabilities) {
-        if (status != V1_0::ErrorStatus::NONE) {
-            const auto canonical =
-                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
-            result = NN_ERROR(canonical) << "getCapabilities_1_2 failed with " << toString(status);
-        } else {
-            result = validatedConvertToCanonical(capabilities);
-        }
-    };
-
-    const auto ret = device->getCapabilities_1_2(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
-
-    return result;
-}
-
 nn::GeneralResult<std::pair<uint32_t, uint32_t>> initNumberOfCacheFilesNeeded(
         V1_2::IDevice* device) {
     CHECK(device != nullptr);
neuralnetworks/1.3/utils/src/Device.cpp  +22 −1

@@ -71,6 +71,27 @@ nn::GeneralResult<nn::SharedBuffer> convert(
     return NN_TRY(std::move(result));
 }

+nn::GeneralResult<nn::Capabilities> initCapabilities(V1_3::IDevice* device) {
+    CHECK(device != nullptr);
+
+    nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
+                                                 << "uninitialized";
+    const auto cb = [&result](ErrorStatus status, const Capabilities& capabilities) {
+        if (status != ErrorStatus::NONE) {
+            const auto canonical =
+                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
+            result = NN_ERROR(canonical) << "getCapabilities_1_3 failed with " << toString(status);
+        } else {
+            result = validatedConvertToCanonical(capabilities);
+        }
+    };
+
+    const auto ret = device->getCapabilities_1_3(cb);
+    NN_TRY(hal::utils::handleTransportError(ret));
+
+    return result;
+}
+
 }  // namespace

 nn::GeneralResult<std::shared_ptr<const Device>> Device::create(std::string name,

@@ -87,7 +108,7 @@ nn::GeneralResult<std::shared_ptr<const Device>> Device::create(std::string name
     auto versionString = NN_TRY(V1_2::utils::initVersionString(device.get()));
     const auto deviceType = NN_TRY(V1_2::utils::initDeviceType(device.get()));
     auto extensions = NN_TRY(V1_2::utils::initExtensions(device.get()));
-    auto capabilities = NN_TRY(V1_2::utils::initCapabilities(device.get()));
+    auto capabilities = NN_TRY(initCapabilities(device.get()));
     const auto numberOfCacheFilesNeeded =
             NN_TRY(V1_2::utils::initNumberOfCacheFilesNeeded(device.get()));
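Both initCapabilities helpers above follow the same shape: the HIDL getCapabilities_1_2 / getCapabilities_1_3 methods deliver their result through a synchronous callback rather than a return value, so the helper pre-seeds a GeneralResult with an "uninitialized" error, lets the callback overwrite it on success or failure, and separately checks the transport-level return with handleTransportError. Below is a minimal, self-contained C++17 sketch of that flow using simplified stand-in types (FakeDevice, a variant-based GeneralResult, a plain ErrorStatus enum); these are illustrative assumptions, not the real NNAPI/HIDL definitions.

#include <functional>
#include <iostream>
#include <string>
#include <variant>

enum class ErrorStatus { NONE, GENERAL_FAILURE };

struct Capabilities {
    float relaxedPerformance = 1.0f;
};

// Simplified analog of nn::GeneralResult<T>: either a value or an error message.
template <typename T>
using GeneralResult = std::variant<T, std::string>;

// Hypothetical device mirroring the HIDL synchronous-callback style: the result
// is handed to a callback instead of being returned directly.
struct FakeDevice {
    void getCapabilities(const std::function<void(ErrorStatus, const Capabilities&)>& cb) const {
        cb(ErrorStatus::NONE, Capabilities{0.5f});
    }
};

GeneralResult<Capabilities> initCapabilities(const FakeDevice* device) {
    // Pre-seed with an error so a callback that never fires still yields a failure.
    GeneralResult<Capabilities> result = std::string("uninitialized");

    const auto cb = [&result](ErrorStatus status, const Capabilities& capabilities) {
        if (status != ErrorStatus::NONE) {
            result = std::string("getCapabilities failed");
        } else {
            result = capabilities;  // the real code converts to the canonical type here
        }
    };

    device->getCapabilities(cb);  // the real code also checks the transport Return<> here
    return result;
}

int main() {
    FakeDevice device;
    const auto result = initCapabilities(&device);
    if (std::holds_alternative<Capabilities>(result)) {
        std::cout << "relaxedPerformance = "
                  << std::get<Capabilities>(result).relaxedPerformance << "\n";
    } else {
        std::cout << "error: " << std::get<std::string>(result) << "\n";
    }
    return 0;
}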