Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 33661d54 authored by Lev Proleev's avatar Lev Proleev Committed by android-build-merger
Browse files

Add quantization coupling test

am: 9226c1e9

Change-Id: Ie09bedb43ec68b5e2f8487a1b4f4da07e0131607
parents d866d3c3 9226c1e9
Loading
Loading
Loading
Loading
+14 −28
Original line number Diff line number Diff line
@@ -456,8 +456,7 @@ TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
    }

    // Execute and verify results.
    EvaluatePreparedModel(preparedModel, testModel,
                          /*testDynamicOutputShape=*/false);
    EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
}

TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
@@ -519,8 +518,7 @@ TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
    }

    // Execute and verify results.
    EvaluatePreparedModel(preparedModel, testModel,
                          /*testDynamicOutputShape=*/false);
    EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
}

TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
@@ -541,8 +539,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -566,8 +563,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -590,8 +586,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -615,8 +610,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -727,8 +721,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -752,8 +745,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -776,8 +768,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -801,8 +792,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -914,8 +904,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -937,8 +926,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
@@ -1082,8 +1070,7 @@ TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                EvaluatePreparedModel(preparedModel, testModelAdd,
                                      /*testDynamicOutputShape=*/false);
                EvaluatePreparedModel(preparedModel, testModelAdd, /*testKind=*/TestKind::GENERAL);
            }
        }
    }
@@ -1144,8 +1131,7 @@ TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                EvaluatePreparedModel(preparedModel, testModelAdd,
                                      /*testDynamicOutputShape=*/false);
                EvaluatePreparedModel(preparedModel, testModelAdd, /*testKind=*/TestKind::GENERAL);
            }
        }
    }
+129 −21
Original line number Diff line number Diff line
@@ -79,6 +79,21 @@ struct TestConfig {
    Executor executor;
    MeasureTiming measureTiming;
    OutputType outputType;
    // `reportSkipping` indicates whether a test should print an info message when
    // it is skipped. It defaults to true and is set to false in quantization
    // coupling tests, which report the skip themselves, to avoid duplicate messages.
    bool reportSkipping;
    TestConfig(Executor executor, MeasureTiming measureTiming, OutputType outputType)
        : executor(executor),
          measureTiming(measureTiming),
          outputType(outputType),
          reportSkipping(true) {}
    TestConfig(Executor executor, MeasureTiming measureTiming, OutputType outputType,
               bool reportSkipping)
        : executor(executor),
          measureTiming(measureTiming),
          outputType(outputType),
          reportSkipping(reportSkipping) {}
};

}  // namespace
@@ -219,7 +234,10 @@ static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
}

void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           const TestConfig& testConfig) {
                           const TestConfig& testConfig, bool* skipped = nullptr) {
    if (skipped != nullptr) {
        *skipped = false;
    }
    // If output0 does not have size larger than one byte, we can not test with insufficient buffer.
    if (testConfig.outputType == OutputType::INSUFFICIENT &&
        !isOutputSizeGreaterThanOne(testModel, 0)) {
@@ -290,6 +308,12 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo

    if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        if (skipped != nullptr) {
            *skipped = true;
        }
        if (!testConfig.reportSkipping) {
            return;
        }
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "execute model that it does not support.";
        std::cout << "[          ]   Early termination of test because vendor service cannot "
@@ -343,44 +367,117 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
}

void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           bool testDynamicOutputShape) {
                           TestKind testKind) {
    std::initializer_list<OutputType> outputTypesList;
    std::initializer_list<MeasureTiming> measureTimingList;
    std::initializer_list<Executor> executorList;

    if (testDynamicOutputShape) {
        outputTypesList = {OutputType::UNSPECIFIED, OutputType::INSUFFICIENT};
    switch (testKind) {
        case TestKind::GENERAL: {
            outputTypesList = {OutputType::FULLY_SPECIFIED};
            measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
            executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
    } else {
        outputTypesList = {OutputType::FULLY_SPECIFIED};
        } break;
        case TestKind::DYNAMIC_SHAPE: {
            outputTypesList = {OutputType::UNSPECIFIED, OutputType::INSUFFICIENT};
            measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
            executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
        } break;
        case TestKind::QUANTIZATION_COUPLING: {
            LOG(FATAL) << "Wrong TestKind for EvaluatePreparedModel";
            return;
        } break;
    }

    for (const OutputType outputType : outputTypesList) {
        for (const MeasureTiming measureTiming : measureTimingList) {
            for (const Executor executor : executorList) {
                const TestConfig testConfig = {.executor = executor,
                                               .measureTiming = measureTiming,
                                               .outputType = outputType};
                const TestConfig testConfig(executor, measureTiming, outputType);
                EvaluatePreparedModel(preparedModel, testModel, testConfig);
            }
        }
    }
}

void Execute(const sp<IDevice>& device, const TestModel& testModel, bool testDynamicOutputShape) {
void EvaluatePreparedCoupledModels(const sp<IPreparedModel>& preparedModel,
                                   const TestModel& testModel,
                                   const sp<IPreparedModel>& preparedCoupledModel,
                                   const TestModel& coupledModel) {
    std::initializer_list<OutputType> outputTypesList = {OutputType::FULLY_SPECIFIED};
    std::initializer_list<MeasureTiming> measureTimingList = {MeasureTiming::NO,
                                                              MeasureTiming::YES};
    std::initializer_list<Executor> executorList = {Executor::ASYNC, Executor::SYNC,
                                                    Executor::BURST};

    for (const OutputType outputType : outputTypesList) {
        for (const MeasureTiming measureTiming : measureTimingList) {
            for (const Executor executor : executorList) {
                const TestConfig testConfig(executor, measureTiming, outputType,
                                            /*reportSkipping=*/false);
                bool baseSkipped = false;
                EvaluatePreparedModel(preparedModel, testModel, testConfig, &baseSkipped);
                bool coupledSkipped = false;
                EvaluatePreparedModel(preparedCoupledModel, coupledModel, testConfig,
                                      &coupledSkipped);
                ASSERT_EQ(baseSkipped, coupledSkipped);
                if (baseSkipped) {
                    LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                                 "execute model that it does not support.";
                    std::cout << "[          ]   Early termination of test because vendor service "
                                 "cannot "
                                 "execute model that it does not support."
                              << std::endl;
                    GTEST_SKIP();
                }
            }
        }
    }
}

void Execute(const sp<IDevice>& device, const TestModel& testModel, TestKind testKind) {
    Model model = createModel(testModel);
    if (testDynamicOutputShape) {
    if (testKind == TestKind::DYNAMIC_SHAPE) {
        makeOutputDimensionsUnspecified(&model);
    }

    sp<IPreparedModel> preparedModel;
    switch (testKind) {
        case TestKind::GENERAL: {
            createPreparedModel(device, model, &preparedModel);
            if (preparedModel == nullptr) return;

    EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
            EvaluatePreparedModel(preparedModel, testModel, TestKind::GENERAL);
        } break;
        case TestKind::DYNAMIC_SHAPE: {
            createPreparedModel(device, model, &preparedModel);
            if (preparedModel == nullptr) return;
            EvaluatePreparedModel(preparedModel, testModel, TestKind::DYNAMIC_SHAPE);
        } break;
        case TestKind::QUANTIZATION_COUPLING: {
            ASSERT_TRUE(testModel.hasQuant8AsymmOperands());
            createPreparedModel(device, model, &preparedModel, /*reportSkipping*/ false);
            TestModel signedQuantizedModel = convertQuant8AsymmOperandsToSigned(testModel);
            sp<IPreparedModel> preparedCoupledModel;
            createPreparedModel(device, createModel(signedQuantizedModel), &preparedCoupledModel,
                                /*reportSkipping*/ false);
            // If we couldn't prepare a model with unsigned quantization, we must
            // fail to prepare a model with signed quantization as well.
            if (preparedModel == nullptr) {
                ASSERT_EQ(preparedCoupledModel, nullptr);
                // If we failed to prepare both of the models, we can safely skip
                // the test.
                LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                             "prepare model that it does not support.";
                std::cout
                        << "[          ]   Early termination of test because vendor service cannot "
                           "prepare model that it does not support."
                        << std::endl;
                GTEST_SKIP();
            }
            ASSERT_NE(preparedCoupledModel, nullptr);
            EvaluatePreparedCoupledModels(preparedModel, testModel, preparedCoupledModel,
                                          signedQuantizedModel);
        } break;
    }
}

void GeneratedTestBase::SetUp() {
@@ -403,12 +500,19 @@ class GeneratedTest : public GeneratedTestBase {};
// Tag for the dynamic output shape tests
class DynamicOutputShapeTest : public GeneratedTest {};

// Tag for the quantization coupling tests
class DISABLED_QuantizationCouplingTest : public GeneratedTest {};

TEST_P(GeneratedTest, Test) {
    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/false);
    Execute(kDevice, kTestModel, /*testKind=*/TestKind::GENERAL);
}

TEST_P(DynamicOutputShapeTest, Test) {
    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/true);
    Execute(kDevice, kTestModel, /*testKind=*/TestKind::DYNAMIC_SHAPE);
}

TEST_P(DISABLED_QuantizationCouplingTest, Test) {
    Execute(kDevice, kTestModel, /*testKind=*/TestKind::QUANTIZATION_COUPLING);
}

INSTANTIATE_GENERATED_TEST(GeneratedTest,
@@ -417,4 +521,8 @@ INSTANTIATE_GENERATED_TEST(GeneratedTest,
INSTANTIATE_GENERATED_TEST(DynamicOutputShapeTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });

INSTANTIATE_GENERATED_TEST(DISABLED_QuantizationCouplingTest, [](const TestModel& testModel) {
    return testModel.hasQuant8AsymmOperands() && testModel.operations.size() == 1;
});

}  // namespace android::hardware::neuralnetworks::V1_3::vts::functional
+12 −1
Original line number Diff line number Diff line
@@ -57,8 +57,19 @@ Model createModel(const test_helper::TestModel& testModel);

void PrepareModel(const sp<IDevice>& device, const Model& model, sp<IPreparedModel>* preparedModel);

enum class TestKind {
    // Runs a test model and compares the results to a golden data
    GENERAL,
    // Same as GENERAL but sets dimensions for the output tensors to zeros
    DYNAMIC_SHAPE,
    // Tests if quantized model with TENSOR_QUANT8_ASYMM produces the same result
    // (OK/SKIPPED/FAILED) as the model with all such tensors converted to
    // TENSOR_QUANT8_ASYMM_SIGNED.
    QUANTIZATION_COUPLING
};

void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel,
                           const test_helper::TestModel& testModel, bool testDynamicOutputShape);
                           const test_helper::TestModel& testModel, TestKind testKind);

}  // namespace android::hardware::neuralnetworks::V1_3::vts::functional

+4 −1
Original line number Diff line number Diff line
@@ -37,7 +37,7 @@ using V1_1::ExecutionPreference;

// internal helper function
void createPreparedModel(const sp<IDevice>& device, const Model& model,
                         sp<IPreparedModel>* preparedModel) {
                         sp<IPreparedModel>* preparedModel, bool reportSkipping) {
    ASSERT_NE(nullptr, preparedModel);
    *preparedModel = nullptr;

@@ -74,6 +74,9 @@ void createPreparedModel(const sp<IDevice>& device, const Model& model,
    // can continue.
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel->get());
        if (!reportSkipping) {
            return;
        }
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot prepare "
                     "model that it does not support.";
        std::cout << "[          ]   Early termination of test because vendor service cannot "
+1 −1
Original line number Diff line number Diff line
@@ -47,7 +47,7 @@ std::string printNeuralnetworksHidlTest(
// Create an IPreparedModel object. If the model cannot be prepared,
// "preparedModel" will be nullptr instead.
void createPreparedModel(const sp<IDevice>& device, const Model& model,
                         sp<IPreparedModel>* preparedModel);
                         sp<IPreparedModel>* preparedModel, bool reportSkipping = true);

// Utility function to get PreparedModel from callback and downcast to V1_2.
sp<IPreparedModel> getPreparedModel_1_3(const sp<implementation::PreparedModelCallback>& callback);