Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 66f598e1 authored by Lev Proleev's avatar Lev Proleev
Browse files

Combine test parameters into TestConfig structure

Test: VtsHalNeuralnetworksV1_2TargetTest --gtest_filter="GeneratedTests*"
Change-Id: I928aaa42e4745b4a8e0e461046e9632b052d0135
parent 8774f10b
Loading
Loading
Loading
Loading
+46 −49
Original line number Original line Diff line number Diff line
@@ -58,8 +58,20 @@ using V1_0::Request;
using V1_1::ExecutionPreference;
using V1_1::ExecutionPreference;
using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;
using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;


namespace {

enum class Executor { ASYNC, SYNC, BURST };

enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };
enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };


struct TestConfig {
    Executor executor;
    MeasureTiming measureTiming;
    OutputType outputType;
};

}  // namespace

Model createModel(const TestModel& testModel) {
Model createModel(const TestModel& testModel) {
    // Model operands.
    // Model operands.
    hidl_vec<Operand> operands(testModel.operands.size());
    hidl_vec<Operand> operands(testModel.operands.size());
@@ -194,31 +206,31 @@ static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
    return android::nn::ExecutionBurstController::create(preparedModel,
    return android::nn::ExecutionBurstController::create(preparedModel,
                                                         std::chrono::microseconds{0});
                                                         std::chrono::microseconds{0});
}
}
enum class Executor { ASYNC, SYNC, BURST };


void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           Executor executor, MeasureTiming measure, OutputType outputType) {
                           const TestConfig& testConfig) {
    // If output0 does not have size larger than one byte, we cannot test with insufficient buffer.
    // If output0 does not have size larger than one byte, we cannot test with insufficient buffer.
    if (outputType == OutputType::INSUFFICIENT && !isOutputSizeGreaterThanOne(testModel, 0)) {
    if (testConfig.outputType == OutputType::INSUFFICIENT &&
        !isOutputSizeGreaterThanOne(testModel, 0)) {
        return;
        return;
    }
    }


    Request request = createRequest(testModel);
    Request request = createRequest(testModel);
    if (outputType == OutputType::INSUFFICIENT) {
    if (testConfig.outputType == OutputType::INSUFFICIENT) {
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
    }
    }


    ErrorStatus executionStatus;
    ErrorStatus executionStatus;
    hidl_vec<OutputShape> outputShapes;
    hidl_vec<OutputShape> outputShapes;
    Timing timing;
    Timing timing;
    switch (executor) {
    switch (testConfig.executor) {
        case Executor::ASYNC: {
        case Executor::ASYNC: {
            SCOPED_TRACE("asynchronous");
            SCOPED_TRACE("asynchronous");


            // launch execution
            // launch execution
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            Return<ErrorStatus> executionLaunchStatus =
            Return<ErrorStatus> executionLaunchStatus = ExecutePreparedModel(
                    ExecutePreparedModel(preparedModel, request, measure, executionCallback);
                    preparedModel, request, testConfig.measureTiming, executionCallback);
            ASSERT_TRUE(executionLaunchStatus.isOk());
            ASSERT_TRUE(executionLaunchStatus.isOk());
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));


@@ -234,8 +246,8 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
            SCOPED_TRACE("synchronous");
            SCOPED_TRACE("synchronous");


            // execute
            // execute
            Return<ErrorStatus> executionReturnStatus =
            Return<ErrorStatus> executionReturnStatus = ExecutePreparedModel(
                    ExecutePreparedModel(preparedModel, request, measure, &outputShapes, &timing);
                    preparedModel, request, testConfig.measureTiming, &outputShapes, &timing);
            ASSERT_TRUE(executionReturnStatus.isOk());
            ASSERT_TRUE(executionReturnStatus.isOk());
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);


@@ -258,14 +270,14 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
            // execute burst
            // execute burst
            int n;
            int n;
            std::tie(n, outputShapes, timing, std::ignore) =
            std::tie(n, outputShapes, timing, std::ignore) =
                    controller->compute(request, measure, keys);
                    controller->compute(request, testConfig.measureTiming, keys);
            executionStatus = nn::convertResultCodeToErrorStatus(n);
            executionStatus = nn::convertResultCodeToErrorStatus(n);


            break;
            break;
        }
        }
    }
    }


    if (outputType != OutputType::FULLY_SPECIFIED &&
    if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "execute model that it does not support.";
                     "execute model that it does not support.";
@@ -274,7 +286,7 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
                  << std::endl;
                  << std::endl;
        GTEST_SKIP();
        GTEST_SKIP();
    }
    }
    if (measure == MeasureTiming::NO) {
    if (testConfig.measureTiming == MeasureTiming::NO) {
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
    } else {
    } else {
@@ -283,7 +295,7 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
        }
        }
    }
    }


    switch (outputType) {
    switch (testConfig.outputType) {
        case OutputType::FULLY_SPECIFIED:
        case OutputType::FULLY_SPECIFIED:
            // If the model output operands are fully specified, outputShapes must be
            // If the model output operands are fully specified, outputShapes must be
            // either empty, or have the same number of elements as the number of outputs.
            // either empty, or have the same number of elements as the number of outputs.
@@ -321,44 +333,29 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo


void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           bool testDynamicOutputShape) {
                           bool testDynamicOutputShape) {
    std::initializer_list<OutputType> outputTypesList;
    std::initializer_list<MeasureTiming> measureTimingList;
    std::initializer_list<Executor> executorList;

    if (testDynamicOutputShape) {
    if (testDynamicOutputShape) {
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
        outputTypesList = {OutputType::UNSPECIFIED, OutputType::INSUFFICIENT};
                              OutputType::UNSPECIFIED);
        measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
        executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
    } else {
    } else {
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
        outputTypesList = {OutputType::FULLY_SPECIFIED};
                              OutputType::FULLY_SPECIFIED);
        measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
        executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
                              OutputType::FULLY_SPECIFIED);
    }
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,

                              OutputType::FULLY_SPECIFIED);
    for (const OutputType outputType : outputTypesList) {
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
        for (const MeasureTiming measureTiming : measureTimingList) {
                              OutputType::FULLY_SPECIFIED);
            for (const Executor executor : executorList) {
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                const TestConfig testConfig = {.executor = executor,
                              OutputType::FULLY_SPECIFIED);
                                               .measureTiming = measureTiming,
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                                               .outputType = outputType};
                              OutputType::FULLY_SPECIFIED);
                EvaluatePreparedModel(preparedModel, testModel, testConfig);
            }
        }
    }
    }
}
}


+46 −49
Original line number Original line Diff line number Diff line
@@ -69,8 +69,20 @@ using V1_2::Timing;
using V1_2::implementation::ExecutionCallback;
using V1_2::implementation::ExecutionCallback;
using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;
using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;


namespace {

enum class Executor { ASYNC, SYNC, BURST };

enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };
enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };


struct TestConfig {
    Executor executor;
    MeasureTiming measureTiming;
    OutputType outputType;
};

}  // namespace

Model createModel(const TestModel& testModel) {
Model createModel(const TestModel& testModel) {
    // Model operands.
    // Model operands.
    hidl_vec<Operand> operands(testModel.operands.size());
    hidl_vec<Operand> operands(testModel.operands.size());
@@ -205,31 +217,31 @@ static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
    return android::nn::ExecutionBurstController::create(preparedModel,
    return android::nn::ExecutionBurstController::create(preparedModel,
                                                         std::chrono::microseconds{0});
                                                         std::chrono::microseconds{0});
}
}
enum class Executor { ASYNC, SYNC, BURST };


void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           Executor executor, MeasureTiming measure, OutputType outputType) {
                           const TestConfig& testConfig) {
    // If output0 does not have size larger than one byte, we cannot test with insufficient buffer.
    // If output0 does not have size larger than one byte, we cannot test with insufficient buffer.
    if (outputType == OutputType::INSUFFICIENT && !isOutputSizeGreaterThanOne(testModel, 0)) {
    if (testConfig.outputType == OutputType::INSUFFICIENT &&
        !isOutputSizeGreaterThanOne(testModel, 0)) {
        return;
        return;
    }
    }


    Request request = createRequest(testModel);
    Request request = createRequest(testModel);
    if (outputType == OutputType::INSUFFICIENT) {
    if (testConfig.outputType == OutputType::INSUFFICIENT) {
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
    }
    }


    ErrorStatus executionStatus;
    ErrorStatus executionStatus;
    hidl_vec<OutputShape> outputShapes;
    hidl_vec<OutputShape> outputShapes;
    Timing timing;
    Timing timing;
    switch (executor) {
    switch (testConfig.executor) {
        case Executor::ASYNC: {
        case Executor::ASYNC: {
            SCOPED_TRACE("asynchronous");
            SCOPED_TRACE("asynchronous");


            // launch execution
            // launch execution
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            Return<ErrorStatus> executionLaunchStatus =
            Return<ErrorStatus> executionLaunchStatus = ExecutePreparedModel(
                    ExecutePreparedModel(preparedModel, request, measure, executionCallback);
                    preparedModel, request, testConfig.measureTiming, executionCallback);
            ASSERT_TRUE(executionLaunchStatus.isOk());
            ASSERT_TRUE(executionLaunchStatus.isOk());
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));


@@ -245,8 +257,8 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
            SCOPED_TRACE("synchronous");
            SCOPED_TRACE("synchronous");


            // execute
            // execute
            Return<ErrorStatus> executionReturnStatus =
            Return<ErrorStatus> executionReturnStatus = ExecutePreparedModel(
                    ExecutePreparedModel(preparedModel, request, measure, &outputShapes, &timing);
                    preparedModel, request, testConfig.measureTiming, &outputShapes, &timing);
            ASSERT_TRUE(executionReturnStatus.isOk());
            ASSERT_TRUE(executionReturnStatus.isOk());
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);


@@ -269,14 +281,14 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
            // execute burst
            // execute burst
            int n;
            int n;
            std::tie(n, outputShapes, timing, std::ignore) =
            std::tie(n, outputShapes, timing, std::ignore) =
                    controller->compute(request, measure, keys);
                    controller->compute(request, testConfig.measureTiming, keys);
            executionStatus = nn::convertResultCodeToErrorStatus(n);
            executionStatus = nn::convertResultCodeToErrorStatus(n);


            break;
            break;
        }
        }
    }
    }


    if (outputType != OutputType::FULLY_SPECIFIED &&
    if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "execute model that it does not support.";
                     "execute model that it does not support.";
@@ -285,7 +297,7 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
                  << std::endl;
                  << std::endl;
        GTEST_SKIP();
        GTEST_SKIP();
    }
    }
    if (measure == MeasureTiming::NO) {
    if (testConfig.measureTiming == MeasureTiming::NO) {
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
    } else {
    } else {
@@ -294,7 +306,7 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
        }
        }
    }
    }


    switch (outputType) {
    switch (testConfig.outputType) {
        case OutputType::FULLY_SPECIFIED:
        case OutputType::FULLY_SPECIFIED:
            // If the model output operands are fully specified, outputShapes must be either
            // If the model output operands are fully specified, outputShapes must be either
            // either empty, or have the same number of elements as the number of outputs.
            // either empty, or have the same number of elements as the number of outputs.
@@ -332,44 +344,29 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo


void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           bool testDynamicOutputShape) {
                           bool testDynamicOutputShape) {
    std::initializer_list<OutputType> outputTypesList;
    std::initializer_list<MeasureTiming> measureTimingList;
    std::initializer_list<Executor> executorList;

    if (testDynamicOutputShape) {
    if (testDynamicOutputShape) {
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
        outputTypesList = {OutputType::UNSPECIFIED, OutputType::INSUFFICIENT};
                              OutputType::UNSPECIFIED);
        measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
        executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
    } else {
    } else {
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
        outputTypesList = {OutputType::FULLY_SPECIFIED};
                              OutputType::FULLY_SPECIFIED);
        measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
        executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
                              OutputType::FULLY_SPECIFIED);
    }
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,

                              OutputType::FULLY_SPECIFIED);
    for (const OutputType outputType : outputTypesList) {
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
        for (const MeasureTiming measureTiming : measureTimingList) {
                              OutputType::FULLY_SPECIFIED);
            for (const Executor executor : executorList) {
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                const TestConfig testConfig = {.executor = executor,
                              OutputType::FULLY_SPECIFIED);
                                               .measureTiming = measureTiming,
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                                               .outputType = outputType};
                              OutputType::FULLY_SPECIFIED);
                EvaluatePreparedModel(preparedModel, testModel, testConfig);
            }
        }
    }
    }
}
}