neuralnetworks/aidl/utils/src/Device.cpp  +1 −1

@@ -119,7 +119,7 @@ nn::GeneralResult<std::pair<uint32_t, uint32_t>> getNumberOfCacheFilesNeededFrom
                 << numberOfCacheFiles.numDataCache << " vs " << nn::kMaxNumberOfCacheFiles << ")";
     }
-    return std::make_pair(numberOfCacheFiles.numDataCache, numberOfCacheFiles.numModelCache);
+    return std::make_pair(numberOfCacheFiles.numModelCache, numberOfCacheFiles.numDataCache);
 }
 
 }  // namespace
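The change above swaps the arguments so the returned pair is (numModelCache, numDataCache) rather than the reversed order. A minimal, self-contained sketch of that conversion pattern follows; the struct and function names here are hypothetical stand-ins, not the real AIDL/NNAPI types.

// Minimal sketch (assumed names, not the AIDL parcelable): convert a
// cache-file-count struct into a pair with the model-cache count first,
// mirroring the corrected ordering in the diff above.
#include <cstdint>
#include <utility>

struct NumberOfCacheFilesSketch {  // hypothetical stand-in for the AIDL type
    uint32_t numModelCache;
    uint32_t numDataCache;
};

// Returns (numModelCache, numDataCache) -- model-cache count first.
std::pair<uint32_t, uint32_t> toCacheFilePair(const NumberOfCacheFilesSketch& files) {
    return std::make_pair(files.numModelCache, files.numDataCache);
}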
neuralnetworks/aidl/utils/test/DeviceTest.cpp  +16 −1

@@ -58,7 +58,7 @@
 const std::string kInvalidName = "";
 const std::shared_ptr<BnDevice> kInvalidDevice;
 constexpr PerformanceInfo kNoPerformanceInfo = {.execTime = std::numeric_limits<float>::max(),
                                                 .powerUsage = std::numeric_limits<float>::max()};
-constexpr NumberOfCacheFiles kNumberOfCacheFiles = {.numModelCache = nn::kMaxNumberOfCacheFiles,
+constexpr NumberOfCacheFiles kNumberOfCacheFiles = {.numModelCache = nn::kMaxNumberOfCacheFiles - 1,
                                                     .numDataCache = nn::kMaxNumberOfCacheFiles};
 
 constexpr auto makeStatusOk = [] { return ndk::ScopedAStatus::ok(); };
@@ -300,6 +300,21 @@ TEST(DeviceTest, getSupportedExtensionsDeadObject) {
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
+TEST(DeviceTest, getNumberOfCacheFilesNeeded) {
+    // setup call
+    const auto mockDevice = createMockDevice();
+    EXPECT_CALL(*mockDevice, getNumberOfCacheFilesNeeded(_)).Times(1);
+
+    // run test
+    const auto result = Device::create(kName, mockDevice);
+
+    // verify result
+    ASSERT_TRUE(result.has_value());
+    constexpr auto kNumberOfCacheFilesPair = std::make_pair<uint32_t, uint32_t>(
+            kNumberOfCacheFiles.numModelCache, kNumberOfCacheFiles.numDataCache);
+    EXPECT_EQ(result.value()->getNumberOfCacheFilesNeeded(), kNumberOfCacheFilesPair);
+}
+
 TEST(DeviceTest, getNumberOfCacheFilesNeededError) {
     // setup call
     const auto mockDevice = createMockDevice();
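The test constant now uses nn::kMaxNumberOfCacheFiles - 1 for the model-cache count, which makes the two elements of the expected pair distinct; with equal values, a swapped (data, model) pair would still compare equal and the regression would go undetected. A small standalone sketch of that reasoning, with an assumed value for the max constant:

// Illustrative only: shows why distinct counts are needed to detect an
// argument-order regression. kMax is an assumed stand-in value, not the
// real nn::kMaxNumberOfCacheFiles definition.
#include <cassert>
#include <cstdint>
#include <utility>

int main() {
    constexpr uint32_t kMax = 32;                   // assumed stand-in value
    constexpr uint32_t kNumModelCache = kMax - 1;   // distinct on purpose
    constexpr uint32_t kNumDataCache = kMax;

    const auto expected = std::make_pair(kNumModelCache, kNumDataCache);
    const auto swapped = std::make_pair(kNumDataCache, kNumModelCache);
    assert(expected != swapped);  // a swapped pair is now observable
    return 0;
}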