author     android-build-team Robot <android-build-team-robot@google.com>  2019-06-19 03:01:47 +0000
committer  android-build-team Robot <android-build-team-robot@google.com>  2019-06-19 03:01:47 +0000
commit     e53b7764f026d1c129493f0a8bd34b5b6766c355 (patch)
tree       0e88087a8f9a07f356147c704be1d44bbe05cfab
parent     b24c8bb20c00eb8452e3d81d7144788dcbc32915 (diff)
parent     adbc13a818fdefc43ef8cc29e9f68c9a39bdb830 (diff)
download   ml-e53b7764f026d1c129493f0a8bd34b5b6766c355.tar.gz
Change-Id: I662781f4692ff0069f6cea93523d90530f37b66c
-rw-r--r--  nn/common/operations/BidirectionalSequenceRNN.cpp                            10
-rw-r--r--  nn/runtime/test/generated/examples/bidirectional_sequence_rnn.example.cpp    48
-rw-r--r--  nn/runtime/test/generated/models/bidirectional_sequence_rnn.model.cpp      1196
-rw-r--r--  nn/runtime/test/generated/vts_models/bidirectional_sequence_rnn.model.cpp   144
-rw-r--r--  nn/runtime/test/specs/V1_2/bidirectional_sequence_rnn.mod.py                 30
5 files changed, 706 insertions, 722 deletions
diff --git a/nn/common/operations/BidirectionalSequenceRNN.cpp b/nn/common/operations/BidirectionalSequenceRNN.cpp
index 0e8bc6d81..32ab00fa0 100644
--- a/nn/common/operations/BidirectionalSequenceRNN.cpp
+++ b/nn/common/operations/BidirectionalSequenceRNN.cpp
@@ -168,12 +168,18 @@ bool executeTyped(IOperationExecutionContext* context) {
const uint32_t maxTime = getSizeOfDimension(inputShape, 0);
const uint32_t batchSize = getSizeOfDimension(inputShape, 1);
const uint32_t inputSize = getSizeOfDimension(inputShape, 2);
- const uint32_t auxInputSize = getSizeOfDimension(auxInputShape, 2);
+ uint32_t auxInputSize = 0;
+ if (hasAuxInputs) {
+ auxInputSize = getSizeOfDimension(auxInputShape, 2);
+ }
const uint32_t fwNumUnits = getSizeOfDimension(fwWeightsShape, 0);
const uint32_t bwNumUnits = getSizeOfDimension(bwWeightsShape, 0);
Shape fixedTimeInputShape = removeFirstDim(inputShape);
- Shape fixedTimeAuxInputShape = removeFirstDim(auxInputShape);
+ Shape fixedTimeAuxInputShape = auxInputShape;
+ if (hasAuxInputs) {
+ fixedTimeAuxInputShape = removeFirstDim(auxInputShape);
+ }
// Create an additional buffer to store a hidden state between steps.
std::vector<T> tempHiddenState(batchSize * fwNumUnits);
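The hunk above guards two reads of the auxiliary input's shape: when the optional auxiliary inputs are omitted, the operand carries no time/batch/feature dimensions, so querying dimension 2 or stripping the leading time dimension must be skipped. The generated tests below are updated accordingly, describing the omitted operands 9-11 with a single zero dimension instead of zero-filled rank-3/rank-2 shapes. The following is a minimal standalone sketch of that guarded-lookup pattern, not the actual NNAPI sources: the helper names mirror the diff, but the Shape layout and the out-of-range behavior of getSizeOfDimension here are assumptions made for illustration, and the two guards from the patch are folded into one if block.

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

// Illustrative shape type; the real NNAPI Shape differs.
struct Shape {
    std::vector<uint32_t> dimensions;
};

// Sketch helper: returns 0 for out-of-range queries instead of reading
// past the end of the dimension vector.
uint32_t getSizeOfDimension(const Shape& shape, uint32_t dim) {
    return dim < shape.dimensions.size() ? shape.dimensions[dim] : 0;
}

// Sketch helper: drops the leading (time) dimension.
Shape removeFirstDim(const Shape& shape) {
    Shape result;
    result.dimensions.assign(shape.dimensions.begin() + 1, shape.dimensions.end());
    return result;
}

int main() {
    Shape inputShape{{2, 16, 8}};  // {batchSize, maxTime, inputSize} in the batch-major test
    Shape auxInputShape{{0}};      // omitted optional operand: a single zero dimension
    const bool hasAuxInputs = false;

    // Guarded lookups, matching the structure of the patched code: the aux
    // shape is only inspected and reshaped when aux inputs are present.
    uint32_t auxInputSize = 0;
    Shape fixedTimeAuxInputShape = auxInputShape;
    if (hasAuxInputs) {
        auxInputSize = getSizeOfDimension(auxInputShape, 2);
        fixedTimeAuxInputShape = removeFirstDim(auxInputShape);
    }

    std::cout << "inputSize=" << getSizeOfDimension(inputShape, 2)
              << " auxInputSize=" << auxInputSize << "\n";
    return 0;
}
```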
diff --git a/nn/runtime/test/generated/examples/bidirectional_sequence_rnn.example.cpp b/nn/runtime/test/generated/examples/bidirectional_sequence_rnn.example.cpp
index 931f455bf..0e4292672 100644
--- a/nn/runtime/test/generated/examples/bidirectional_sequence_rnn.example.cpp
+++ b/nn/runtime/test/generated/examples/bidirectional_sequence_rnn.example.cpp
@@ -8,7 +8,7 @@ static std::vector<MixedTypedExample> examples_blackbox = {
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -65,7 +65,7 @@ static std::vector<MixedTypedExample> examples_blackbox_relaxed = {
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -122,7 +122,7 @@ static std::vector<MixedTypedExample> examples_blackbox_float16 = {
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {},
// int -> INT32 map
@@ -179,7 +179,7 @@ static std::vector<MixedTypedExample> examples_blackbox_dynamic_output_shape = {
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -236,7 +236,7 @@ static std::vector<MixedTypedExample> examples_blackbox_dynamic_output_shape_rel
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -293,7 +293,7 @@ static std::vector<MixedTypedExample> examples_blackbox_dynamic_output_shape_flo
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {},
// int -> INT32 map
@@ -350,7 +350,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major = {
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -407,7 +407,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_relaxed = {
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -464,7 +464,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_float16 = {
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {},
// int -> INT32 map
@@ -521,7 +521,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_dynamic_outpu
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -578,7 +578,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_dynamic_outpu
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -635,7 +635,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_dynamic_outpu
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {},
// int -> INT32 map
@@ -692,7 +692,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_merge_outputs
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -749,7 +749,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_merge_outputs
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -806,7 +806,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_merge_outputs
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {},
// int -> INT32 map
@@ -863,7 +863,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_merge_outputs
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -920,7 +920,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_merge_outputs
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -977,7 +977,7 @@ static std::vector<MixedTypedExample> examples_blackbox_time_major_merge_outputs
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {16, 2, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {},
// int -> INT32 map
@@ -1034,7 +1034,7 @@ static std::vector<MixedTypedExample> examples_blackbox_reversed_inputs = {
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {-0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -1091,7 +1091,7 @@ static std::vector<MixedTypedExample> examples_blackbox_reversed_inputs_relaxed
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {-0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -1148,7 +1148,7 @@ static std::vector<MixedTypedExample> examples_blackbox_reversed_inputs_float16
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {},
// int -> INT32 map
@@ -1205,7 +1205,7 @@ static std::vector<MixedTypedExample> examples_blackbox_reversed_inputs_dynamic_
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {-0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -1262,7 +1262,7 @@ static std::vector<MixedTypedExample> examples_blackbox_reversed_inputs_dynamic_
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {{0, {-0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f, -0.56172788f, -0.044897556f, 0.89964068f, -0.067662835f, 0.76863563f, 0.93455386f, -0.6324693f, -0.083922029f, -0.12287641f, 0.70117295f, 0.90532446f, 0.89782166f, 0.79817224f, 0.53402734f, -0.33286154f, 0.073485017f, 0.28153265f, 0.19455957f, -0.55479527f, -0.56042433f, 0.26048636f, 0.84702539f, 0.47587705f, -0.074295521f, 0.14744234f, 0.62589407f, 0.1653645f, -0.10651493f, -0.045277178f, 0.99032974f, -0.88255352f, -0.85147917f, 0.27531278f, -0.095755219f, -0.712036f, -0.17384434f, -0.50593495f, -0.18646687f, -0.96508682f, 0.43519354f, 0.66521823f, 0.0300982f, -0.7747041f, -0.02038002f, 0.020698071f, -0.90300065f, 0.62870288f, -0.23068321f, 0.19379807f, 0.29120302f, 0.077113032f, -0.70331609f, 0.15804303f, -0.93407321f, 0.40182066f, 0.036301374f, -0.058871567f, -0.66609079f, 0.59098077f, 0.73017097f, 0.74604273f, 0.32882881f, -0.17503482f, 0.22396147f, 0.74861872f, 0.17831337f, 0.2755419f, 0.51864719f, 0.55084288f, 0.58982027f, -0.47443086f, 0.20875752f, 0.87825835f, 0.59929144f, 0.62827742f, 0.18899453f, 0.31440187f, 0.99059987f, 0.87170351f, -0.35091716f, 0.47791874f, 0.86496925f, -0.53376222f, 0.85315156f, 0.10288584f, 0.86684f, -0.011186242f, 0.10513687f, 0.65791464f, 0.35130811f, 0.80834007f, -0.61777675f, -0.21095741f, 0.41213346f, 0.73784804f, 0.094794154f, 0.33468103f, 0.75801885f, 0.30660987f, -0.37354088f, 0.77002847f, -0.62747043f, -0.68572164f, 0.0069220066f, -0.99774903f, -0.98858172f, -0.38952237f, -0.47685933f, 0.31073618f, 0.71511042f, -0.63767755f, -0.31729108f, -0.69424844f, -0.93421471f, -0.87287879f, 0.37144363f, -0.62476718f, 0.23791671f, 0.40060222f, 0.1356622f, 0.23689353f, 0.285385f, 0.037029743f, -0.19858193f, -0.27569133f, 0.43773448f, 0.60379338f, 0.35562468f}}, {1, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 
0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {2, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {3, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {4, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {5, {0.461459f, 0.153381f, 0.529743f, -0.00371218f, 0.676267f, -0.211346f, 0.317493f, 0.969689f, -0.343251f, 0.186423f, 0.398151f, 0.152399f, 0.448504f, 0.317662f, 0.523556f, -0.323514f, 
0.480877f, 0.333113f, -0.757714f, -0.674487f, -0.643585f, 0.217766f, -0.0251462f, 0.79512f, -0.595574f, -0.422444f, 0.371572f, -0.452178f, -0.556069f, -0.482188f, -0.685456f, -0.727851f, 0.841829f, 0.551535f, -0.232336f, 0.729158f, -0.00294906f, -0.69754f, 0.766073f, -0.178424f, 0.369513f, -0.423241f, 0.548547f, -0.0152023f, -0.757482f, -0.85491f, 0.251331f, -0.989183f, 0.306261f, -0.340716f, 0.886103f, -0.0726757f, -0.723523f, -0.784303f, 0.0354295f, 0.566564f, -0.485469f, -0.620498f, 0.832546f, 0.697884f, -0.279115f, 0.294415f, -0.584313f, 0.548772f, 0.0648819f, 0.968726f, 0.723834f, -0.0080452f, -0.350386f, -0.272803f, 0.115121f, -0.412644f, -0.824713f, -0.992843f, -0.592904f, -0.417893f, 0.863791f, -0.423461f, -0.147601f, -0.770664f, -0.479006f, 0.654782f, 0.587314f, -0.639158f, 0.816969f, -0.337228f, 0.659878f, 0.73107f, 0.754768f, -0.337042f, 0.0960841f, 0.368357f, 0.244191f, -0.817703f, -0.211223f, 0.442012f, 0.37225f, -0.623598f, -0.405423f, 0.455101f, 0.673656f, -0.145345f, -0.511346f, -0.901675f, -0.81252f, -0.127006f, 0.809865f, -0.721884f, 0.636255f, 0.868989f, -0.347973f, -0.10179f, -0.777449f, 0.917274f, 0.819286f, 0.206218f, -0.00785118f, 0.167141f, 0.45872f, 0.972934f, -0.276798f, 0.837861f, 0.747958f, -0.0151566f, -0.330057f, -0.469077f, 0.277308f, 0.415818f}}, {6, {0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.1f}}, {7, {0.065691948f, -0.69055247f, 0.1107955f, -0.97084129f, -0.23957068f, -0.23566568f, -0.389184f, 0.47481549f, -0.4791103f, 0.29931796f, 0.10463274f, 0.83918178f, 0.37197268f, 0.61957061f, 0.3956964f, -0.37609905f}}, {8, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {9, {}}, {10, {}}, {11, {}}},
// int -> INT32 map
@@ -1319,7 +1319,7 @@ static std::vector<MixedTypedExample> examples_blackbox_reversed_inputs_dynamic_
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
// int -> Dimensions map
- .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0, 0, 0}}, {10, {0, 0}}, {11, {0, 0}}},
+ .operandDimensions = {{0, {2, 16, 8}}, {1, {16, 8}}, {2, {16, 16}}, {3, {16}}, {4, {2, 16}}, {5, {16, 8}}, {6, {16, 16}}, {7, {16}}, {8, {2, 16}}, {9, {0}}, {10, {0}}, {11, {0}}},
// int -> FLOAT32 map
.float32Operands = {},
// int -> INT32 map
diff --git a/nn/runtime/test/generated/models/bidirectional_sequence_rnn.model.cpp b/nn/runtime/test/generated/models/bidirectional_sequence_rnn.model.cpp
index e0822b89e..b6c9e5b4b 100644
--- a/nn/runtime/test/generated/models/bidirectional_sequence_rnn.model.cpp
+++ b/nn/runtime/test/generated/models/bidirectional_sequence_rnn.model.cpp
@@ -6,11 +6,10 @@ void CreateModel(Model *model) {
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type7(Type::TENSOR_FLOAT32, {2, 16, 16});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type6(Type::TENSOR_FLOAT32, {2, 16, 16});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input = model->addOperand(&type0);
auto fw_weights = model->addOperand(&type1);
@@ -22,13 +21,13 @@ void CreateModel(Model *model) {
auto bw_bias = model->addOperand(&type3);
auto bw_hidden_state = model->addOperand(&type4);
auto aux_input = model->addOperand(&type5);
- auto fw_aux_weights = model->addOperand(&type6);
- auto bw_aux_weights = model->addOperand(&type6);
- auto activation = model->addOperand(&type8);
- auto time_major = model->addOperand(&type9);
- auto merge_outputs = model->addOperand(&type9);
- auto fw_output = model->addOperand(&type7);
- auto bw_output = model->addOperand(&type7);
+ auto fw_aux_weights = model->addOperand(&type5);
+ auto bw_aux_weights = model->addOperand(&type5);
+ auto activation = model->addOperand(&type7);
+ auto time_major = model->addOperand(&type8);
+ auto merge_outputs = model->addOperand(&type8);
+ auto fw_output = model->addOperand(&type6);
+ auto bw_output = model->addOperand(&type6);
// Phase 2, operations
static int32_t activation_init[] = {1};
model->setOperandValue(activation, activation_init, sizeof(int32_t) * 1);
@@ -55,11 +54,10 @@ void CreateModel_relaxed(Model *model) {
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type7(Type::TENSOR_FLOAT32, {2, 16, 16});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type6(Type::TENSOR_FLOAT32, {2, 16, 16});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input = model->addOperand(&type0);
auto fw_weights = model->addOperand(&type1);
@@ -71,13 +69,13 @@ void CreateModel_relaxed(Model *model) {
auto bw_bias = model->addOperand(&type3);
auto bw_hidden_state = model->addOperand(&type4);
auto aux_input = model->addOperand(&type5);
- auto fw_aux_weights = model->addOperand(&type6);
- auto bw_aux_weights = model->addOperand(&type6);
- auto activation = model->addOperand(&type8);
- auto time_major = model->addOperand(&type9);
- auto merge_outputs = model->addOperand(&type9);
- auto fw_output = model->addOperand(&type7);
- auto bw_output = model->addOperand(&type7);
+ auto fw_aux_weights = model->addOperand(&type5);
+ auto bw_aux_weights = model->addOperand(&type5);
+ auto activation = model->addOperand(&type7);
+ auto time_major = model->addOperand(&type8);
+ auto merge_outputs = model->addOperand(&type8);
+ auto fw_output = model->addOperand(&type6);
+ auto bw_output = model->addOperand(&type6);
// Phase 2, operations
static int32_t activation_init[] = {1};
model->setOperandValue(activation, activation_init, sizeof(int32_t) * 1);
@@ -101,34 +99,33 @@ inline bool is_ignored_relaxed(int i) {
}
void CreateModel_float16(Model *model) {
- OperandType type15(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type16(Type::TENSOR_FLOAT16, {0, 0});
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type19(Type::TENSOR_FLOAT16, {2, 16, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type22(Type::TENSOR_FLOAT16, {2, 16, 8});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type14(Type::TENSOR_FLOAT16, {0});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type17(Type::TENSOR_FLOAT16, {2, 16, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type20(Type::TENSOR_FLOAT16, {2, 16, 8});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input = model->addOperand(&type22);
- auto fw_weights = model->addOperand(&type21);
- auto fw_recurrent_weights = model->addOperand(&type20);
- auto fw_bias = model->addOperand(&type17);
- auto fw_hidden_state = model->addOperand(&type18);
- auto bw_weights = model->addOperand(&type21);
- auto bw_recurrent_weights = model->addOperand(&type20);
- auto bw_bias = model->addOperand(&type17);
- auto bw_hidden_state = model->addOperand(&type18);
- auto aux_input = model->addOperand(&type15);
- auto fw_aux_weights = model->addOperand(&type16);
- auto bw_aux_weights = model->addOperand(&type16);
- auto activation = model->addOperand(&type8);
- auto time_major = model->addOperand(&type9);
- auto merge_outputs = model->addOperand(&type9);
- auto fw_output = model->addOperand(&type19);
- auto bw_output = model->addOperand(&type19);
+ auto input = model->addOperand(&type20);
+ auto fw_weights = model->addOperand(&type19);
+ auto fw_recurrent_weights = model->addOperand(&type18);
+ auto fw_bias = model->addOperand(&type15);
+ auto fw_hidden_state = model->addOperand(&type16);
+ auto bw_weights = model->addOperand(&type19);
+ auto bw_recurrent_weights = model->addOperand(&type18);
+ auto bw_bias = model->addOperand(&type15);
+ auto bw_hidden_state = model->addOperand(&type16);
+ auto aux_input = model->addOperand(&type14);
+ auto fw_aux_weights = model->addOperand(&type14);
+ auto bw_aux_weights = model->addOperand(&type14);
+ auto activation = model->addOperand(&type7);
+ auto time_major = model->addOperand(&type8);
+ auto merge_outputs = model->addOperand(&type8);
+ auto fw_output = model->addOperand(&type17);
+ auto bw_output = model->addOperand(&type17);
// Phase 2, operations
static int32_t activation_init[] = {1};
model->setOperandValue(activation, activation_init, sizeof(int32_t) * 1);
@@ -153,12 +150,12 @@ void CreateModel_dynamic_output_shape(Model *model) {
OperandType type0(Type::TENSOR_FLOAT32, {2, 16, 8});
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input = model->addOperand(&type0);
auto fw_weights = model->addOperand(&type1);
@@ -170,13 +167,13 @@ void CreateModel_dynamic_output_shape(Model *model) {
auto bw_bias = model->addOperand(&type3);
auto bw_hidden_state = model->addOperand(&type4);
auto aux_input = model->addOperand(&type5);
- auto fw_aux_weights = model->addOperand(&type6);
- auto bw_aux_weights = model->addOperand(&type6);
- auto activation = model->addOperand(&type8);
- auto time_major = model->addOperand(&type9);
- auto merge_outputs = model->addOperand(&type9);
- auto fw_output = model->addOperand(&type5);
- auto bw_output = model->addOperand(&type5);
+ auto fw_aux_weights = model->addOperand(&type5);
+ auto bw_aux_weights = model->addOperand(&type5);
+ auto activation = model->addOperand(&type7);
+ auto time_major = model->addOperand(&type8);
+ auto merge_outputs = model->addOperand(&type8);
+ auto fw_output = model->addOperand(&type21);
+ auto bw_output = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation_init[] = {1};
model->setOperandValue(activation, activation_init, sizeof(int32_t) * 1);
@@ -201,12 +198,12 @@ void CreateModel_dynamic_output_shape_relaxed(Model *model) {
OperandType type0(Type::TENSOR_FLOAT32, {2, 16, 8});
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input = model->addOperand(&type0);
auto fw_weights = model->addOperand(&type1);
@@ -218,13 +215,13 @@ void CreateModel_dynamic_output_shape_relaxed(Model *model) {
auto bw_bias = model->addOperand(&type3);
auto bw_hidden_state = model->addOperand(&type4);
auto aux_input = model->addOperand(&type5);
- auto fw_aux_weights = model->addOperand(&type6);
- auto bw_aux_weights = model->addOperand(&type6);
- auto activation = model->addOperand(&type8);
- auto time_major = model->addOperand(&type9);
- auto merge_outputs = model->addOperand(&type9);
- auto fw_output = model->addOperand(&type5);
- auto bw_output = model->addOperand(&type5);
+ auto fw_aux_weights = model->addOperand(&type5);
+ auto bw_aux_weights = model->addOperand(&type5);
+ auto activation = model->addOperand(&type7);
+ auto time_major = model->addOperand(&type8);
+ auto merge_outputs = model->addOperand(&type8);
+ auto fw_output = model->addOperand(&type21);
+ auto bw_output = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation_init[] = {1};
model->setOperandValue(activation, activation_init, sizeof(int32_t) * 1);
@@ -248,34 +245,33 @@ inline bool is_ignored_dynamic_output_shape_relaxed(int i) {
}
void CreateModel_dynamic_output_shape_float16(Model *model) {
- OperandType type15(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type16(Type::TENSOR_FLOAT16, {0, 0});
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type22(Type::TENSOR_FLOAT16, {2, 16, 8});
- OperandType type23(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type14(Type::TENSOR_FLOAT16, {0});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type20(Type::TENSOR_FLOAT16, {2, 16, 8});
+ OperandType type22(Type::TENSOR_FLOAT16, {0, 0, 0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input = model->addOperand(&type22);
- auto fw_weights = model->addOperand(&type21);
- auto fw_recurrent_weights = model->addOperand(&type20);
- auto fw_bias = model->addOperand(&type17);
- auto fw_hidden_state = model->addOperand(&type18);
- auto bw_weights = model->addOperand(&type21);
- auto bw_recurrent_weights = model->addOperand(&type20);
- auto bw_bias = model->addOperand(&type17);
- auto bw_hidden_state = model->addOperand(&type18);
- auto aux_input = model->addOperand(&type15);
- auto fw_aux_weights = model->addOperand(&type16);
- auto bw_aux_weights = model->addOperand(&type16);
- auto activation = model->addOperand(&type8);
- auto time_major = model->addOperand(&type9);
- auto merge_outputs = model->addOperand(&type9);
- auto fw_output = model->addOperand(&type23);
- auto bw_output = model->addOperand(&type23);
+ auto input = model->addOperand(&type20);
+ auto fw_weights = model->addOperand(&type19);
+ auto fw_recurrent_weights = model->addOperand(&type18);
+ auto fw_bias = model->addOperand(&type15);
+ auto fw_hidden_state = model->addOperand(&type16);
+ auto bw_weights = model->addOperand(&type19);
+ auto bw_recurrent_weights = model->addOperand(&type18);
+ auto bw_bias = model->addOperand(&type15);
+ auto bw_hidden_state = model->addOperand(&type16);
+ auto aux_input = model->addOperand(&type14);
+ auto fw_aux_weights = model->addOperand(&type14);
+ auto bw_aux_weights = model->addOperand(&type14);
+ auto activation = model->addOperand(&type7);
+ auto time_major = model->addOperand(&type8);
+ auto merge_outputs = model->addOperand(&type8);
+ auto fw_output = model->addOperand(&type22);
+ auto bw_output = model->addOperand(&type22);
// Phase 2, operations
static int32_t activation_init[] = {1};
model->setOperandValue(activation, activation_init, sizeof(int32_t) * 1);
@@ -298,17 +294,16 @@ inline bool is_ignored_dynamic_output_shape_float16(int i) {
void CreateModel_2(Model *model) {
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
- OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 8});
- OperandType type11(Type::TENSOR_FLOAT32, {16, 2, 16});
+ OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 16});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {16, 2, 8});
// Phase 1, operands
- auto input1 = model->addOperand(&type10);
+ auto input1 = model->addOperand(&type9);
auto fw_weights1 = model->addOperand(&type1);
auto fw_recurrent_weights1 = model->addOperand(&type2);
auto fw_bias1 = model->addOperand(&type3);
@@ -318,13 +313,13 @@ void CreateModel_2(Model *model) {
auto bw_bias1 = model->addOperand(&type3);
auto bw_hidden_state1 = model->addOperand(&type4);
auto aux_input1 = model->addOperand(&type5);
- auto fw_aux_weights1 = model->addOperand(&type6);
- auto bw_aux_weights1 = model->addOperand(&type6);
- auto activation1 = model->addOperand(&type8);
- auto time_major1 = model->addOperand(&type9);
- auto merge_outputs1 = model->addOperand(&type9);
- auto fw_output1 = model->addOperand(&type11);
- auto bw_output1 = model->addOperand(&type11);
+ auto fw_aux_weights1 = model->addOperand(&type5);
+ auto bw_aux_weights1 = model->addOperand(&type5);
+ auto activation1 = model->addOperand(&type7);
+ auto time_major1 = model->addOperand(&type8);
+ auto merge_outputs1 = model->addOperand(&type8);
+ auto fw_output1 = model->addOperand(&type10);
+ auto bw_output1 = model->addOperand(&type10);
// Phase 2, operations
static int32_t activation1_init[] = {1};
model->setOperandValue(activation1, activation1_init, sizeof(int32_t) * 1);
@@ -347,17 +342,16 @@ inline bool is_ignored_2(int i) {
void CreateModel_relaxed_2(Model *model) {
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
- OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 8});
- OperandType type11(Type::TENSOR_FLOAT32, {16, 2, 16});
+ OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 16});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {16, 2, 8});
// Phase 1, operands
- auto input1 = model->addOperand(&type10);
+ auto input1 = model->addOperand(&type9);
auto fw_weights1 = model->addOperand(&type1);
auto fw_recurrent_weights1 = model->addOperand(&type2);
auto fw_bias1 = model->addOperand(&type3);
@@ -367,13 +361,13 @@ void CreateModel_relaxed_2(Model *model) {
auto bw_bias1 = model->addOperand(&type3);
auto bw_hidden_state1 = model->addOperand(&type4);
auto aux_input1 = model->addOperand(&type5);
- auto fw_aux_weights1 = model->addOperand(&type6);
- auto bw_aux_weights1 = model->addOperand(&type6);
- auto activation1 = model->addOperand(&type8);
- auto time_major1 = model->addOperand(&type9);
- auto merge_outputs1 = model->addOperand(&type9);
- auto fw_output1 = model->addOperand(&type11);
- auto bw_output1 = model->addOperand(&type11);
+ auto fw_aux_weights1 = model->addOperand(&type5);
+ auto bw_aux_weights1 = model->addOperand(&type5);
+ auto activation1 = model->addOperand(&type7);
+ auto time_major1 = model->addOperand(&type8);
+ auto merge_outputs1 = model->addOperand(&type8);
+ auto fw_output1 = model->addOperand(&type10);
+ auto bw_output1 = model->addOperand(&type10);
// Phase 2, operations
static int32_t activation1_init[] = {1};
model->setOperandValue(activation1, activation1_init, sizeof(int32_t) * 1);
@@ -397,34 +391,33 @@ inline bool is_ignored_relaxed_2(int i) {
}
void CreateModel_float16_2(Model *model) {
- OperandType type15(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type16(Type::TENSOR_FLOAT16, {0, 0});
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type24(Type::TENSOR_FLOAT16, {16, 2, 16});
- OperandType type25(Type::TENSOR_FLOAT16, {16, 2, 8});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type14(Type::TENSOR_FLOAT16, {0});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type23(Type::TENSOR_FLOAT16, {16, 2, 16});
+ OperandType type24(Type::TENSOR_FLOAT16, {16, 2, 8});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input1 = model->addOperand(&type25);
- auto fw_weights1 = model->addOperand(&type21);
- auto fw_recurrent_weights1 = model->addOperand(&type20);
- auto fw_bias1 = model->addOperand(&type17);
- auto fw_hidden_state1 = model->addOperand(&type18);
- auto bw_weights1 = model->addOperand(&type21);
- auto bw_recurrent_weights1 = model->addOperand(&type20);
- auto bw_bias1 = model->addOperand(&type17);
- auto bw_hidden_state1 = model->addOperand(&type18);
- auto aux_input1 = model->addOperand(&type15);
- auto fw_aux_weights1 = model->addOperand(&type16);
- auto bw_aux_weights1 = model->addOperand(&type16);
- auto activation1 = model->addOperand(&type8);
- auto time_major1 = model->addOperand(&type9);
- auto merge_outputs1 = model->addOperand(&type9);
- auto fw_output1 = model->addOperand(&type24);
- auto bw_output1 = model->addOperand(&type24);
+ auto input1 = model->addOperand(&type24);
+ auto fw_weights1 = model->addOperand(&type19);
+ auto fw_recurrent_weights1 = model->addOperand(&type18);
+ auto fw_bias1 = model->addOperand(&type15);
+ auto fw_hidden_state1 = model->addOperand(&type16);
+ auto bw_weights1 = model->addOperand(&type19);
+ auto bw_recurrent_weights1 = model->addOperand(&type18);
+ auto bw_bias1 = model->addOperand(&type15);
+ auto bw_hidden_state1 = model->addOperand(&type16);
+ auto aux_input1 = model->addOperand(&type14);
+ auto fw_aux_weights1 = model->addOperand(&type14);
+ auto bw_aux_weights1 = model->addOperand(&type14);
+ auto activation1 = model->addOperand(&type7);
+ auto time_major1 = model->addOperand(&type8);
+ auto merge_outputs1 = model->addOperand(&type8);
+ auto fw_output1 = model->addOperand(&type23);
+ auto bw_output1 = model->addOperand(&type23);
// Phase 2, operations
static int32_t activation1_init[] = {1};
model->setOperandValue(activation1, activation1_init, sizeof(int32_t) * 1);
@@ -447,16 +440,16 @@ inline bool is_ignored_float16_2(int i) {
void CreateModel_dynamic_output_shape_2(Model *model) {
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
- OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {16, 2, 8});
// Phase 1, operands
- auto input1 = model->addOperand(&type10);
+ auto input1 = model->addOperand(&type9);
auto fw_weights1 = model->addOperand(&type1);
auto fw_recurrent_weights1 = model->addOperand(&type2);
auto fw_bias1 = model->addOperand(&type3);
@@ -466,13 +459,13 @@ void CreateModel_dynamic_output_shape_2(Model *model) {
auto bw_bias1 = model->addOperand(&type3);
auto bw_hidden_state1 = model->addOperand(&type4);
auto aux_input1 = model->addOperand(&type5);
- auto fw_aux_weights1 = model->addOperand(&type6);
- auto bw_aux_weights1 = model->addOperand(&type6);
- auto activation1 = model->addOperand(&type8);
- auto time_major1 = model->addOperand(&type9);
- auto merge_outputs1 = model->addOperand(&type9);
- auto fw_output1 = model->addOperand(&type5);
- auto bw_output1 = model->addOperand(&type5);
+ auto fw_aux_weights1 = model->addOperand(&type5);
+ auto bw_aux_weights1 = model->addOperand(&type5);
+ auto activation1 = model->addOperand(&type7);
+ auto time_major1 = model->addOperand(&type8);
+ auto merge_outputs1 = model->addOperand(&type8);
+ auto fw_output1 = model->addOperand(&type21);
+ auto bw_output1 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation1_init[] = {1};
model->setOperandValue(activation1, activation1_init, sizeof(int32_t) * 1);
@@ -495,16 +488,16 @@ inline bool is_ignored_dynamic_output_shape_2(int i) {
void CreateModel_dynamic_output_shape_relaxed_2(Model *model) {
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
- OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {16, 2, 8});
// Phase 1, operands
- auto input1 = model->addOperand(&type10);
+ auto input1 = model->addOperand(&type9);
auto fw_weights1 = model->addOperand(&type1);
auto fw_recurrent_weights1 = model->addOperand(&type2);
auto fw_bias1 = model->addOperand(&type3);
@@ -514,13 +507,13 @@ void CreateModel_dynamic_output_shape_relaxed_2(Model *model) {
auto bw_bias1 = model->addOperand(&type3);
auto bw_hidden_state1 = model->addOperand(&type4);
auto aux_input1 = model->addOperand(&type5);
- auto fw_aux_weights1 = model->addOperand(&type6);
- auto bw_aux_weights1 = model->addOperand(&type6);
- auto activation1 = model->addOperand(&type8);
- auto time_major1 = model->addOperand(&type9);
- auto merge_outputs1 = model->addOperand(&type9);
- auto fw_output1 = model->addOperand(&type5);
- auto bw_output1 = model->addOperand(&type5);
+ auto fw_aux_weights1 = model->addOperand(&type5);
+ auto bw_aux_weights1 = model->addOperand(&type5);
+ auto activation1 = model->addOperand(&type7);
+ auto time_major1 = model->addOperand(&type8);
+ auto merge_outputs1 = model->addOperand(&type8);
+ auto fw_output1 = model->addOperand(&type21);
+ auto bw_output1 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation1_init[] = {1};
model->setOperandValue(activation1, activation1_init, sizeof(int32_t) * 1);
@@ -544,34 +537,33 @@ inline bool is_ignored_dynamic_output_shape_relaxed_2(int i) {
}
void CreateModel_dynamic_output_shape_float16_2(Model *model) {
- OperandType type15(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type16(Type::TENSOR_FLOAT16, {0, 0});
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type23(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type25(Type::TENSOR_FLOAT16, {16, 2, 8});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type14(Type::TENSOR_FLOAT16, {0});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type22(Type::TENSOR_FLOAT16, {0, 0, 0});
+ OperandType type24(Type::TENSOR_FLOAT16, {16, 2, 8});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input1 = model->addOperand(&type25);
- auto fw_weights1 = model->addOperand(&type21);
- auto fw_recurrent_weights1 = model->addOperand(&type20);
- auto fw_bias1 = model->addOperand(&type17);
- auto fw_hidden_state1 = model->addOperand(&type18);
- auto bw_weights1 = model->addOperand(&type21);
- auto bw_recurrent_weights1 = model->addOperand(&type20);
- auto bw_bias1 = model->addOperand(&type17);
- auto bw_hidden_state1 = model->addOperand(&type18);
- auto aux_input1 = model->addOperand(&type15);
- auto fw_aux_weights1 = model->addOperand(&type16);
- auto bw_aux_weights1 = model->addOperand(&type16);
- auto activation1 = model->addOperand(&type8);
- auto time_major1 = model->addOperand(&type9);
- auto merge_outputs1 = model->addOperand(&type9);
- auto fw_output1 = model->addOperand(&type23);
- auto bw_output1 = model->addOperand(&type23);
+ auto input1 = model->addOperand(&type24);
+ auto fw_weights1 = model->addOperand(&type19);
+ auto fw_recurrent_weights1 = model->addOperand(&type18);
+ auto fw_bias1 = model->addOperand(&type15);
+ auto fw_hidden_state1 = model->addOperand(&type16);
+ auto bw_weights1 = model->addOperand(&type19);
+ auto bw_recurrent_weights1 = model->addOperand(&type18);
+ auto bw_bias1 = model->addOperand(&type15);
+ auto bw_hidden_state1 = model->addOperand(&type16);
+ auto aux_input1 = model->addOperand(&type14);
+ auto fw_aux_weights1 = model->addOperand(&type14);
+ auto bw_aux_weights1 = model->addOperand(&type14);
+ auto activation1 = model->addOperand(&type7);
+ auto time_major1 = model->addOperand(&type8);
+ auto merge_outputs1 = model->addOperand(&type8);
+ auto fw_output1 = model->addOperand(&type22);
+ auto bw_output1 = model->addOperand(&type22);
// Phase 2, operations
static int32_t activation1_init[] = {1};
model->setOperandValue(activation1, activation1_init, sizeof(int32_t) * 1);
@@ -594,17 +586,16 @@ inline bool is_ignored_dynamic_output_shape_float16_2(int i) {
void CreateModel_3(Model *model) {
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
- OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 8});
- OperandType type12(Type::TENSOR_FLOAT32, {16, 2, 32});
+ OperandType type11(Type::TENSOR_FLOAT32, {16, 2, 32});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {16, 2, 8});
// Phase 1, operands
- auto input2 = model->addOperand(&type10);
+ auto input2 = model->addOperand(&type9);
auto fw_weights2 = model->addOperand(&type1);
auto fw_recurrent_weights2 = model->addOperand(&type2);
auto fw_bias2 = model->addOperand(&type3);
@@ -614,12 +605,12 @@ void CreateModel_3(Model *model) {
auto bw_bias2 = model->addOperand(&type3);
auto bw_hidden_state2 = model->addOperand(&type4);
auto aux_input2 = model->addOperand(&type5);
- auto fw_aux_weights2 = model->addOperand(&type6);
- auto bw_aux_weights2 = model->addOperand(&type6);
- auto activation2 = model->addOperand(&type8);
- auto time_major2 = model->addOperand(&type9);
- auto merge_outputs2 = model->addOperand(&type9);
- auto fw_output2 = model->addOperand(&type12);
+ auto fw_aux_weights2 = model->addOperand(&type5);
+ auto bw_aux_weights2 = model->addOperand(&type5);
+ auto activation2 = model->addOperand(&type7);
+ auto time_major2 = model->addOperand(&type8);
+ auto merge_outputs2 = model->addOperand(&type8);
+ auto fw_output2 = model->addOperand(&type11);
// Phase 2, operations
static int32_t activation2_init[] = {1};
model->setOperandValue(activation2, activation2_init, sizeof(int32_t) * 1);
@@ -642,17 +633,16 @@ inline bool is_ignored_3(int i) {
void CreateModel_relaxed_3(Model *model) {
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
- OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 8});
- OperandType type12(Type::TENSOR_FLOAT32, {16, 2, 32});
+ OperandType type11(Type::TENSOR_FLOAT32, {16, 2, 32});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {16, 2, 8});
// Phase 1, operands
- auto input2 = model->addOperand(&type10);
+ auto input2 = model->addOperand(&type9);
auto fw_weights2 = model->addOperand(&type1);
auto fw_recurrent_weights2 = model->addOperand(&type2);
auto fw_bias2 = model->addOperand(&type3);
@@ -662,12 +652,12 @@ void CreateModel_relaxed_3(Model *model) {
auto bw_bias2 = model->addOperand(&type3);
auto bw_hidden_state2 = model->addOperand(&type4);
auto aux_input2 = model->addOperand(&type5);
- auto fw_aux_weights2 = model->addOperand(&type6);
- auto bw_aux_weights2 = model->addOperand(&type6);
- auto activation2 = model->addOperand(&type8);
- auto time_major2 = model->addOperand(&type9);
- auto merge_outputs2 = model->addOperand(&type9);
- auto fw_output2 = model->addOperand(&type12);
+ auto fw_aux_weights2 = model->addOperand(&type5);
+ auto bw_aux_weights2 = model->addOperand(&type5);
+ auto activation2 = model->addOperand(&type7);
+ auto time_major2 = model->addOperand(&type8);
+ auto merge_outputs2 = model->addOperand(&type8);
+ auto fw_output2 = model->addOperand(&type11);
// Phase 2, operations
static int32_t activation2_init[] = {1};
model->setOperandValue(activation2, activation2_init, sizeof(int32_t) * 1);
@@ -691,33 +681,32 @@ inline bool is_ignored_relaxed_3(int i) {
}
void CreateModel_float16_3(Model *model) {
- OperandType type15(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type16(Type::TENSOR_FLOAT16, {0, 0});
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type25(Type::TENSOR_FLOAT16, {16, 2, 8});
- OperandType type26(Type::TENSOR_FLOAT16, {16, 2, 32});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type14(Type::TENSOR_FLOAT16, {0});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type24(Type::TENSOR_FLOAT16, {16, 2, 8});
+ OperandType type25(Type::TENSOR_FLOAT16, {16, 2, 32});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input2 = model->addOperand(&type25);
- auto fw_weights2 = model->addOperand(&type21);
- auto fw_recurrent_weights2 = model->addOperand(&type20);
- auto fw_bias2 = model->addOperand(&type17);
- auto fw_hidden_state2 = model->addOperand(&type18);
- auto bw_weights2 = model->addOperand(&type21);
- auto bw_recurrent_weights2 = model->addOperand(&type20);
- auto bw_bias2 = model->addOperand(&type17);
- auto bw_hidden_state2 = model->addOperand(&type18);
- auto aux_input2 = model->addOperand(&type15);
- auto fw_aux_weights2 = model->addOperand(&type16);
- auto bw_aux_weights2 = model->addOperand(&type16);
- auto activation2 = model->addOperand(&type8);
- auto time_major2 = model->addOperand(&type9);
- auto merge_outputs2 = model->addOperand(&type9);
- auto fw_output2 = model->addOperand(&type26);
+ auto input2 = model->addOperand(&type24);
+ auto fw_weights2 = model->addOperand(&type19);
+ auto fw_recurrent_weights2 = model->addOperand(&type18);
+ auto fw_bias2 = model->addOperand(&type15);
+ auto fw_hidden_state2 = model->addOperand(&type16);
+ auto bw_weights2 = model->addOperand(&type19);
+ auto bw_recurrent_weights2 = model->addOperand(&type18);
+ auto bw_bias2 = model->addOperand(&type15);
+ auto bw_hidden_state2 = model->addOperand(&type16);
+ auto aux_input2 = model->addOperand(&type14);
+ auto fw_aux_weights2 = model->addOperand(&type14);
+ auto bw_aux_weights2 = model->addOperand(&type14);
+ auto activation2 = model->addOperand(&type7);
+ auto time_major2 = model->addOperand(&type8);
+ auto merge_outputs2 = model->addOperand(&type8);
+ auto fw_output2 = model->addOperand(&type25);
// Phase 2, operations
static int32_t activation2_init[] = {1};
model->setOperandValue(activation2, activation2_init, sizeof(int32_t) * 1);
@@ -740,16 +729,16 @@ inline bool is_ignored_float16_3(int i) {
void CreateModel_dynamic_output_shape_3(Model *model) {
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
- OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {16, 2, 8});
// Phase 1, operands
- auto input2 = model->addOperand(&type10);
+ auto input2 = model->addOperand(&type9);
auto fw_weights2 = model->addOperand(&type1);
auto fw_recurrent_weights2 = model->addOperand(&type2);
auto fw_bias2 = model->addOperand(&type3);
@@ -759,12 +748,12 @@ void CreateModel_dynamic_output_shape_3(Model *model) {
auto bw_bias2 = model->addOperand(&type3);
auto bw_hidden_state2 = model->addOperand(&type4);
auto aux_input2 = model->addOperand(&type5);
- auto fw_aux_weights2 = model->addOperand(&type6);
- auto bw_aux_weights2 = model->addOperand(&type6);
- auto activation2 = model->addOperand(&type8);
- auto time_major2 = model->addOperand(&type9);
- auto merge_outputs2 = model->addOperand(&type9);
- auto fw_output2 = model->addOperand(&type5);
+ auto fw_aux_weights2 = model->addOperand(&type5);
+ auto bw_aux_weights2 = model->addOperand(&type5);
+ auto activation2 = model->addOperand(&type7);
+ auto time_major2 = model->addOperand(&type8);
+ auto merge_outputs2 = model->addOperand(&type8);
+ auto fw_output2 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation2_init[] = {1};
model->setOperandValue(activation2, activation2_init, sizeof(int32_t) * 1);
@@ -787,16 +776,16 @@ inline bool is_ignored_dynamic_output_shape_3(int i) {
void CreateModel_dynamic_output_shape_relaxed_3(Model *model) {
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
- OperandType type10(Type::TENSOR_FLOAT32, {16, 2, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {16, 2, 8});
// Phase 1, operands
- auto input2 = model->addOperand(&type10);
+ auto input2 = model->addOperand(&type9);
auto fw_weights2 = model->addOperand(&type1);
auto fw_recurrent_weights2 = model->addOperand(&type2);
auto fw_bias2 = model->addOperand(&type3);
@@ -806,12 +795,12 @@ void CreateModel_dynamic_output_shape_relaxed_3(Model *model) {
auto bw_bias2 = model->addOperand(&type3);
auto bw_hidden_state2 = model->addOperand(&type4);
auto aux_input2 = model->addOperand(&type5);
- auto fw_aux_weights2 = model->addOperand(&type6);
- auto bw_aux_weights2 = model->addOperand(&type6);
- auto activation2 = model->addOperand(&type8);
- auto time_major2 = model->addOperand(&type9);
- auto merge_outputs2 = model->addOperand(&type9);
- auto fw_output2 = model->addOperand(&type5);
+ auto fw_aux_weights2 = model->addOperand(&type5);
+ auto bw_aux_weights2 = model->addOperand(&type5);
+ auto activation2 = model->addOperand(&type7);
+ auto time_major2 = model->addOperand(&type8);
+ auto merge_outputs2 = model->addOperand(&type8);
+ auto fw_output2 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation2_init[] = {1};
model->setOperandValue(activation2, activation2_init, sizeof(int32_t) * 1);
@@ -835,33 +824,32 @@ inline bool is_ignored_dynamic_output_shape_relaxed_3(int i) {
}
void CreateModel_dynamic_output_shape_float16_3(Model *model) {
- OperandType type15(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type16(Type::TENSOR_FLOAT16, {0, 0});
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type23(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type25(Type::TENSOR_FLOAT16, {16, 2, 8});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type14(Type::TENSOR_FLOAT16, {0});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type22(Type::TENSOR_FLOAT16, {0, 0, 0});
+ OperandType type24(Type::TENSOR_FLOAT16, {16, 2, 8});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input2 = model->addOperand(&type25);
- auto fw_weights2 = model->addOperand(&type21);
- auto fw_recurrent_weights2 = model->addOperand(&type20);
- auto fw_bias2 = model->addOperand(&type17);
- auto fw_hidden_state2 = model->addOperand(&type18);
- auto bw_weights2 = model->addOperand(&type21);
- auto bw_recurrent_weights2 = model->addOperand(&type20);
- auto bw_bias2 = model->addOperand(&type17);
- auto bw_hidden_state2 = model->addOperand(&type18);
- auto aux_input2 = model->addOperand(&type15);
- auto fw_aux_weights2 = model->addOperand(&type16);
- auto bw_aux_weights2 = model->addOperand(&type16);
- auto activation2 = model->addOperand(&type8);
- auto time_major2 = model->addOperand(&type9);
- auto merge_outputs2 = model->addOperand(&type9);
- auto fw_output2 = model->addOperand(&type23);
+ auto input2 = model->addOperand(&type24);
+ auto fw_weights2 = model->addOperand(&type19);
+ auto fw_recurrent_weights2 = model->addOperand(&type18);
+ auto fw_bias2 = model->addOperand(&type15);
+ auto fw_hidden_state2 = model->addOperand(&type16);
+ auto bw_weights2 = model->addOperand(&type19);
+ auto bw_recurrent_weights2 = model->addOperand(&type18);
+ auto bw_bias2 = model->addOperand(&type15);
+ auto bw_hidden_state2 = model->addOperand(&type16);
+ auto aux_input2 = model->addOperand(&type14);
+ auto fw_aux_weights2 = model->addOperand(&type14);
+ auto bw_aux_weights2 = model->addOperand(&type14);
+ auto activation2 = model->addOperand(&type7);
+ auto time_major2 = model->addOperand(&type8);
+ auto merge_outputs2 = model->addOperand(&type8);
+ auto fw_output2 = model->addOperand(&type22);
// Phase 2, operations
static int32_t activation2_init[] = {1};
model->setOperandValue(activation2, activation2_init, sizeof(int32_t) * 1);
@@ -888,11 +876,10 @@ void CreateModel_4(Model *model) {
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type7(Type::TENSOR_FLOAT32, {2, 16, 16});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type6(Type::TENSOR_FLOAT32, {2, 16, 16});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input3 = model->addOperand(&type0);
auto fw_weights3 = model->addOperand(&type1);
@@ -904,13 +891,13 @@ void CreateModel_4(Model *model) {
auto bw_bias3 = model->addOperand(&type3);
auto bw_hidden_state3 = model->addOperand(&type4);
auto aux_input3 = model->addOperand(&type5);
- auto fw_aux_weights3 = model->addOperand(&type6);
- auto bw_aux_weights3 = model->addOperand(&type6);
- auto activation3 = model->addOperand(&type8);
- auto time_major3 = model->addOperand(&type9);
- auto merge_outputs3 = model->addOperand(&type9);
- auto fw_output3 = model->addOperand(&type7);
- auto bw_output2 = model->addOperand(&type7);
+ auto fw_aux_weights3 = model->addOperand(&type5);
+ auto bw_aux_weights3 = model->addOperand(&type5);
+ auto activation3 = model->addOperand(&type7);
+ auto time_major3 = model->addOperand(&type8);
+ auto merge_outputs3 = model->addOperand(&type8);
+ auto fw_output3 = model->addOperand(&type6);
+ auto bw_output2 = model->addOperand(&type6);
// Phase 2, operations
static int32_t activation3_init[] = {1};
model->setOperandValue(activation3, activation3_init, sizeof(int32_t) * 1);
@@ -937,11 +924,10 @@ void CreateModel_relaxed_4(Model *model) {
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type7(Type::TENSOR_FLOAT32, {2, 16, 16});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type6(Type::TENSOR_FLOAT32, {2, 16, 16});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input3 = model->addOperand(&type0);
auto fw_weights3 = model->addOperand(&type1);
@@ -953,13 +939,13 @@ void CreateModel_relaxed_4(Model *model) {
auto bw_bias3 = model->addOperand(&type3);
auto bw_hidden_state3 = model->addOperand(&type4);
auto aux_input3 = model->addOperand(&type5);
- auto fw_aux_weights3 = model->addOperand(&type6);
- auto bw_aux_weights3 = model->addOperand(&type6);
- auto activation3 = model->addOperand(&type8);
- auto time_major3 = model->addOperand(&type9);
- auto merge_outputs3 = model->addOperand(&type9);
- auto fw_output3 = model->addOperand(&type7);
- auto bw_output2 = model->addOperand(&type7);
+ auto fw_aux_weights3 = model->addOperand(&type5);
+ auto bw_aux_weights3 = model->addOperand(&type5);
+ auto activation3 = model->addOperand(&type7);
+ auto time_major3 = model->addOperand(&type8);
+ auto merge_outputs3 = model->addOperand(&type8);
+ auto fw_output3 = model->addOperand(&type6);
+ auto bw_output2 = model->addOperand(&type6);
// Phase 2, operations
static int32_t activation3_init[] = {1};
model->setOperandValue(activation3, activation3_init, sizeof(int32_t) * 1);
@@ -983,34 +969,33 @@ inline bool is_ignored_relaxed_4(int i) {
}
void CreateModel_float16_4(Model *model) {
- OperandType type15(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type16(Type::TENSOR_FLOAT16, {0, 0});
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type19(Type::TENSOR_FLOAT16, {2, 16, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type22(Type::TENSOR_FLOAT16, {2, 16, 8});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type14(Type::TENSOR_FLOAT16, {0});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type17(Type::TENSOR_FLOAT16, {2, 16, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type20(Type::TENSOR_FLOAT16, {2, 16, 8});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input3 = model->addOperand(&type22);
- auto fw_weights3 = model->addOperand(&type21);
- auto fw_recurrent_weights3 = model->addOperand(&type20);
- auto fw_bias3 = model->addOperand(&type17);
- auto fw_hidden_state3 = model->addOperand(&type18);
- auto bw_weights3 = model->addOperand(&type21);
- auto bw_recurrent_weights3 = model->addOperand(&type20);
- auto bw_bias3 = model->addOperand(&type17);
- auto bw_hidden_state3 = model->addOperand(&type18);
- auto aux_input3 = model->addOperand(&type15);
- auto fw_aux_weights3 = model->addOperand(&type16);
- auto bw_aux_weights3 = model->addOperand(&type16);
- auto activation3 = model->addOperand(&type8);
- auto time_major3 = model->addOperand(&type9);
- auto merge_outputs3 = model->addOperand(&type9);
- auto fw_output3 = model->addOperand(&type19);
- auto bw_output2 = model->addOperand(&type19);
+ auto input3 = model->addOperand(&type20);
+ auto fw_weights3 = model->addOperand(&type19);
+ auto fw_recurrent_weights3 = model->addOperand(&type18);
+ auto fw_bias3 = model->addOperand(&type15);
+ auto fw_hidden_state3 = model->addOperand(&type16);
+ auto bw_weights3 = model->addOperand(&type19);
+ auto bw_recurrent_weights3 = model->addOperand(&type18);
+ auto bw_bias3 = model->addOperand(&type15);
+ auto bw_hidden_state3 = model->addOperand(&type16);
+ auto aux_input3 = model->addOperand(&type14);
+ auto fw_aux_weights3 = model->addOperand(&type14);
+ auto bw_aux_weights3 = model->addOperand(&type14);
+ auto activation3 = model->addOperand(&type7);
+ auto time_major3 = model->addOperand(&type8);
+ auto merge_outputs3 = model->addOperand(&type8);
+ auto fw_output3 = model->addOperand(&type17);
+ auto bw_output2 = model->addOperand(&type17);
// Phase 2, operations
static int32_t activation3_init[] = {1};
model->setOperandValue(activation3, activation3_init, sizeof(int32_t) * 1);
@@ -1035,12 +1020,12 @@ void CreateModel_dynamic_output_shape_4(Model *model) {
OperandType type0(Type::TENSOR_FLOAT32, {2, 16, 8});
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input3 = model->addOperand(&type0);
auto fw_weights3 = model->addOperand(&type1);
@@ -1052,13 +1037,13 @@ void CreateModel_dynamic_output_shape_4(Model *model) {
auto bw_bias3 = model->addOperand(&type3);
auto bw_hidden_state3 = model->addOperand(&type4);
auto aux_input3 = model->addOperand(&type5);
- auto fw_aux_weights3 = model->addOperand(&type6);
- auto bw_aux_weights3 = model->addOperand(&type6);
- auto activation3 = model->addOperand(&type8);
- auto time_major3 = model->addOperand(&type9);
- auto merge_outputs3 = model->addOperand(&type9);
- auto fw_output3 = model->addOperand(&type5);
- auto bw_output2 = model->addOperand(&type5);
+ auto fw_aux_weights3 = model->addOperand(&type5);
+ auto bw_aux_weights3 = model->addOperand(&type5);
+ auto activation3 = model->addOperand(&type7);
+ auto time_major3 = model->addOperand(&type8);
+ auto merge_outputs3 = model->addOperand(&type8);
+ auto fw_output3 = model->addOperand(&type21);
+ auto bw_output2 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation3_init[] = {1};
model->setOperandValue(activation3, activation3_init, sizeof(int32_t) * 1);
@@ -1083,12 +1068,12 @@ void CreateModel_dynamic_output_shape_relaxed_4(Model *model) {
OperandType type0(Type::TENSOR_FLOAT32, {2, 16, 8});
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type6(Type::TENSOR_FLOAT32, {0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type5(Type::TENSOR_FLOAT32, {0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input3 = model->addOperand(&type0);
auto fw_weights3 = model->addOperand(&type1);
@@ -1100,13 +1085,13 @@ void CreateModel_dynamic_output_shape_relaxed_4(Model *model) {
auto bw_bias3 = model->addOperand(&type3);
auto bw_hidden_state3 = model->addOperand(&type4);
auto aux_input3 = model->addOperand(&type5);
- auto fw_aux_weights3 = model->addOperand(&type6);
- auto bw_aux_weights3 = model->addOperand(&type6);
- auto activation3 = model->addOperand(&type8);
- auto time_major3 = model->addOperand(&type9);
- auto merge_outputs3 = model->addOperand(&type9);
- auto fw_output3 = model->addOperand(&type5);
- auto bw_output2 = model->addOperand(&type5);
+ auto fw_aux_weights3 = model->addOperand(&type5);
+ auto bw_aux_weights3 = model->addOperand(&type5);
+ auto activation3 = model->addOperand(&type7);
+ auto time_major3 = model->addOperand(&type8);
+ auto merge_outputs3 = model->addOperand(&type8);
+ auto fw_output3 = model->addOperand(&type21);
+ auto bw_output2 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation3_init[] = {1};
model->setOperandValue(activation3, activation3_init, sizeof(int32_t) * 1);
@@ -1130,34 +1115,33 @@ inline bool is_ignored_dynamic_output_shape_relaxed_4(int i) {
}
void CreateModel_dynamic_output_shape_float16_4(Model *model) {
- OperandType type15(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type16(Type::TENSOR_FLOAT16, {0, 0});
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type22(Type::TENSOR_FLOAT16, {2, 16, 8});
- OperandType type23(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type14(Type::TENSOR_FLOAT16, {0});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type20(Type::TENSOR_FLOAT16, {2, 16, 8});
+ OperandType type22(Type::TENSOR_FLOAT16, {0, 0, 0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input3 = model->addOperand(&type22);
- auto fw_weights3 = model->addOperand(&type21);
- auto fw_recurrent_weights3 = model->addOperand(&type20);
- auto fw_bias3 = model->addOperand(&type17);
- auto fw_hidden_state3 = model->addOperand(&type18);
- auto bw_weights3 = model->addOperand(&type21);
- auto bw_recurrent_weights3 = model->addOperand(&type20);
- auto bw_bias3 = model->addOperand(&type17);
- auto bw_hidden_state3 = model->addOperand(&type18);
- auto aux_input3 = model->addOperand(&type15);
- auto fw_aux_weights3 = model->addOperand(&type16);
- auto bw_aux_weights3 = model->addOperand(&type16);
- auto activation3 = model->addOperand(&type8);
- auto time_major3 = model->addOperand(&type9);
- auto merge_outputs3 = model->addOperand(&type9);
- auto fw_output3 = model->addOperand(&type23);
- auto bw_output2 = model->addOperand(&type23);
+ auto input3 = model->addOperand(&type20);
+ auto fw_weights3 = model->addOperand(&type19);
+ auto fw_recurrent_weights3 = model->addOperand(&type18);
+ auto fw_bias3 = model->addOperand(&type15);
+ auto fw_hidden_state3 = model->addOperand(&type16);
+ auto bw_weights3 = model->addOperand(&type19);
+ auto bw_recurrent_weights3 = model->addOperand(&type18);
+ auto bw_bias3 = model->addOperand(&type15);
+ auto bw_hidden_state3 = model->addOperand(&type16);
+ auto aux_input3 = model->addOperand(&type14);
+ auto fw_aux_weights3 = model->addOperand(&type14);
+ auto bw_aux_weights3 = model->addOperand(&type14);
+ auto activation3 = model->addOperand(&type7);
+ auto time_major3 = model->addOperand(&type8);
+ auto merge_outputs3 = model->addOperand(&type8);
+ auto fw_output3 = model->addOperand(&type22);
+ auto bw_output2 = model->addOperand(&type22);
// Phase 2, operations
static int32_t activation3_init[] = {1};
model->setOperandValue(activation3, activation3_init, sizeof(int32_t) * 1);
@@ -1184,9 +1168,9 @@ void CreateModel_5(Model *model) {
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type7(Type::TENSOR_FLOAT32, {2, 16, 16});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type6(Type::TENSOR_FLOAT32, {2, 16, 16});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input4 = model->addOperand(&type0);
auto fw_weights4 = model->addOperand(&type1);
@@ -1200,11 +1184,11 @@ void CreateModel_5(Model *model) {
auto aux_input4 = model->addOperand(&type0);
auto fw_aux_weights4 = model->addOperand(&type1);
auto bw_aux_weights4 = model->addOperand(&type1);
- auto activation4 = model->addOperand(&type8);
- auto time_major4 = model->addOperand(&type9);
- auto merge_outputs4 = model->addOperand(&type9);
- auto fw_output4 = model->addOperand(&type7);
- auto bw_output3 = model->addOperand(&type7);
+ auto activation4 = model->addOperand(&type7);
+ auto time_major4 = model->addOperand(&type8);
+ auto merge_outputs4 = model->addOperand(&type8);
+ auto fw_output4 = model->addOperand(&type6);
+ auto bw_output3 = model->addOperand(&type6);
// Phase 2, operations
static int32_t activation4_init[] = {1};
model->setOperandValue(activation4, activation4_init, sizeof(int32_t) * 1);
@@ -1231,9 +1215,9 @@ void CreateModel_relaxed_5(Model *model) {
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type7(Type::TENSOR_FLOAT32, {2, 16, 16});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type6(Type::TENSOR_FLOAT32, {2, 16, 16});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input4 = model->addOperand(&type0);
auto fw_weights4 = model->addOperand(&type1);
@@ -1247,11 +1231,11 @@ void CreateModel_relaxed_5(Model *model) {
auto aux_input4 = model->addOperand(&type0);
auto fw_aux_weights4 = model->addOperand(&type1);
auto bw_aux_weights4 = model->addOperand(&type1);
- auto activation4 = model->addOperand(&type8);
- auto time_major4 = model->addOperand(&type9);
- auto merge_outputs4 = model->addOperand(&type9);
- auto fw_output4 = model->addOperand(&type7);
- auto bw_output3 = model->addOperand(&type7);
+ auto activation4 = model->addOperand(&type7);
+ auto time_major4 = model->addOperand(&type8);
+ auto merge_outputs4 = model->addOperand(&type8);
+ auto fw_output4 = model->addOperand(&type6);
+ auto bw_output3 = model->addOperand(&type6);
// Phase 2, operations
static int32_t activation4_init[] = {1};
model->setOperandValue(activation4, activation4_init, sizeof(int32_t) * 1);
@@ -1275,32 +1259,32 @@ inline bool is_ignored_relaxed_5(int i) {
}
void CreateModel_float16_5(Model *model) {
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type19(Type::TENSOR_FLOAT16, {2, 16, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type22(Type::TENSOR_FLOAT16, {2, 16, 8});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type17(Type::TENSOR_FLOAT16, {2, 16, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type20(Type::TENSOR_FLOAT16, {2, 16, 8});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input4 = model->addOperand(&type22);
- auto fw_weights4 = model->addOperand(&type21);
- auto fw_recurrent_weights4 = model->addOperand(&type20);
- auto fw_bias4 = model->addOperand(&type17);
- auto fw_hidden_state4 = model->addOperand(&type18);
- auto bw_weights4 = model->addOperand(&type21);
- auto bw_recurrent_weights4 = model->addOperand(&type20);
- auto bw_bias4 = model->addOperand(&type17);
- auto bw_hidden_state4 = model->addOperand(&type18);
- auto aux_input4 = model->addOperand(&type22);
- auto fw_aux_weights4 = model->addOperand(&type21);
- auto bw_aux_weights4 = model->addOperand(&type21);
- auto activation4 = model->addOperand(&type8);
- auto time_major4 = model->addOperand(&type9);
- auto merge_outputs4 = model->addOperand(&type9);
- auto fw_output4 = model->addOperand(&type19);
- auto bw_output3 = model->addOperand(&type19);
+ auto input4 = model->addOperand(&type20);
+ auto fw_weights4 = model->addOperand(&type19);
+ auto fw_recurrent_weights4 = model->addOperand(&type18);
+ auto fw_bias4 = model->addOperand(&type15);
+ auto fw_hidden_state4 = model->addOperand(&type16);
+ auto bw_weights4 = model->addOperand(&type19);
+ auto bw_recurrent_weights4 = model->addOperand(&type18);
+ auto bw_bias4 = model->addOperand(&type15);
+ auto bw_hidden_state4 = model->addOperand(&type16);
+ auto aux_input4 = model->addOperand(&type20);
+ auto fw_aux_weights4 = model->addOperand(&type19);
+ auto bw_aux_weights4 = model->addOperand(&type19);
+ auto activation4 = model->addOperand(&type7);
+ auto time_major4 = model->addOperand(&type8);
+ auto merge_outputs4 = model->addOperand(&type8);
+ auto fw_output4 = model->addOperand(&type17);
+ auto bw_output3 = model->addOperand(&type17);
// Phase 2, operations
static int32_t activation4_init[] = {1};
model->setOperandValue(activation4, activation4_init, sizeof(int32_t) * 1);
@@ -1325,11 +1309,11 @@ void CreateModel_dynamic_output_shape_5(Model *model) {
OperandType type0(Type::TENSOR_FLOAT32, {2, 16, 8});
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input4 = model->addOperand(&type0);
auto fw_weights4 = model->addOperand(&type1);
@@ -1343,11 +1327,11 @@ void CreateModel_dynamic_output_shape_5(Model *model) {
auto aux_input4 = model->addOperand(&type0);
auto fw_aux_weights4 = model->addOperand(&type1);
auto bw_aux_weights4 = model->addOperand(&type1);
- auto activation4 = model->addOperand(&type8);
- auto time_major4 = model->addOperand(&type9);
- auto merge_outputs4 = model->addOperand(&type9);
- auto fw_output4 = model->addOperand(&type5);
- auto bw_output3 = model->addOperand(&type5);
+ auto activation4 = model->addOperand(&type7);
+ auto time_major4 = model->addOperand(&type8);
+ auto merge_outputs4 = model->addOperand(&type8);
+ auto fw_output4 = model->addOperand(&type21);
+ auto bw_output3 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation4_init[] = {1};
model->setOperandValue(activation4, activation4_init, sizeof(int32_t) * 1);
@@ -1372,11 +1356,11 @@ void CreateModel_dynamic_output_shape_relaxed_5(Model *model) {
OperandType type0(Type::TENSOR_FLOAT32, {2, 16, 8});
OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
auto input4 = model->addOperand(&type0);
auto fw_weights4 = model->addOperand(&type1);
@@ -1390,11 +1374,11 @@ void CreateModel_dynamic_output_shape_relaxed_5(Model *model) {
auto aux_input4 = model->addOperand(&type0);
auto fw_aux_weights4 = model->addOperand(&type1);
auto bw_aux_weights4 = model->addOperand(&type1);
- auto activation4 = model->addOperand(&type8);
- auto time_major4 = model->addOperand(&type9);
- auto merge_outputs4 = model->addOperand(&type9);
- auto fw_output4 = model->addOperand(&type5);
- auto bw_output3 = model->addOperand(&type5);
+ auto activation4 = model->addOperand(&type7);
+ auto time_major4 = model->addOperand(&type8);
+ auto merge_outputs4 = model->addOperand(&type8);
+ auto fw_output4 = model->addOperand(&type21);
+ auto bw_output3 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation4_init[] = {1};
model->setOperandValue(activation4, activation4_init, sizeof(int32_t) * 1);
@@ -1418,32 +1402,32 @@ inline bool is_ignored_dynamic_output_shape_relaxed_5(int i) {
}
void CreateModel_dynamic_output_shape_float16_5(Model *model) {
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type21(Type::TENSOR_FLOAT16, {16, 8});
- OperandType type22(Type::TENSOR_FLOAT16, {2, 16, 8});
- OperandType type23(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type19(Type::TENSOR_FLOAT16, {16, 8});
+ OperandType type20(Type::TENSOR_FLOAT16, {2, 16, 8});
+ OperandType type22(Type::TENSOR_FLOAT16, {0, 0, 0});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input4 = model->addOperand(&type22);
- auto fw_weights4 = model->addOperand(&type21);
- auto fw_recurrent_weights4 = model->addOperand(&type20);
- auto fw_bias4 = model->addOperand(&type17);
- auto fw_hidden_state4 = model->addOperand(&type18);
- auto bw_weights4 = model->addOperand(&type21);
- auto bw_recurrent_weights4 = model->addOperand(&type20);
- auto bw_bias4 = model->addOperand(&type17);
- auto bw_hidden_state4 = model->addOperand(&type18);
- auto aux_input4 = model->addOperand(&type22);
- auto fw_aux_weights4 = model->addOperand(&type21);
- auto bw_aux_weights4 = model->addOperand(&type21);
- auto activation4 = model->addOperand(&type8);
- auto time_major4 = model->addOperand(&type9);
- auto merge_outputs4 = model->addOperand(&type9);
- auto fw_output4 = model->addOperand(&type23);
- auto bw_output3 = model->addOperand(&type23);
+ auto input4 = model->addOperand(&type20);
+ auto fw_weights4 = model->addOperand(&type19);
+ auto fw_recurrent_weights4 = model->addOperand(&type18);
+ auto fw_bias4 = model->addOperand(&type15);
+ auto fw_hidden_state4 = model->addOperand(&type16);
+ auto bw_weights4 = model->addOperand(&type19);
+ auto bw_recurrent_weights4 = model->addOperand(&type18);
+ auto bw_bias4 = model->addOperand(&type15);
+ auto bw_hidden_state4 = model->addOperand(&type16);
+ auto aux_input4 = model->addOperand(&type20);
+ auto fw_aux_weights4 = model->addOperand(&type19);
+ auto bw_aux_weights4 = model->addOperand(&type19);
+ auto activation4 = model->addOperand(&type7);
+ auto time_major4 = model->addOperand(&type8);
+ auto merge_outputs4 = model->addOperand(&type8);
+ auto fw_output4 = model->addOperand(&type22);
+ auto bw_output3 = model->addOperand(&type22);
// Phase 2, operations
static int32_t activation4_init[] = {1};
model->setOperandValue(activation4, activation4_init, sizeof(int32_t) * 1);
@@ -1465,32 +1449,32 @@ inline bool is_ignored_dynamic_output_shape_float16_5(int i) {
}
void CreateModel_6(Model *model) {
- OperandType type13(Type::TENSOR_FLOAT32, {2, 16, 4});
- OperandType type14(Type::TENSOR_FLOAT32, {16, 4});
+ OperandType type12(Type::TENSOR_FLOAT32, {2, 16, 4});
+ OperandType type13(Type::TENSOR_FLOAT32, {16, 4});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type7(Type::TENSOR_FLOAT32, {2, 16, 16});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type6(Type::TENSOR_FLOAT32, {2, 16, 16});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input5 = model->addOperand(&type13);
- auto fw_weights5 = model->addOperand(&type14);
+ auto input5 = model->addOperand(&type12);
+ auto fw_weights5 = model->addOperand(&type13);
auto fw_recurrent_weights5 = model->addOperand(&type2);
auto fw_bias5 = model->addOperand(&type3);
auto fw_hidden_state5 = model->addOperand(&type4);
- auto bw_weights5 = model->addOperand(&type14);
+ auto bw_weights5 = model->addOperand(&type13);
auto bw_recurrent_weights5 = model->addOperand(&type2);
auto bw_bias5 = model->addOperand(&type3);
auto bw_hidden_state5 = model->addOperand(&type4);
- auto aux_input5 = model->addOperand(&type13);
- auto fw_aux_weights5 = model->addOperand(&type14);
- auto bw_aux_weights5 = model->addOperand(&type14);
- auto activation5 = model->addOperand(&type8);
- auto time_major5 = model->addOperand(&type9);
- auto merge_outputs5 = model->addOperand(&type9);
- auto fw_output5 = model->addOperand(&type7);
- auto bw_output4 = model->addOperand(&type7);
+ auto aux_input5 = model->addOperand(&type12);
+ auto fw_aux_weights5 = model->addOperand(&type13);
+ auto bw_aux_weights5 = model->addOperand(&type13);
+ auto activation5 = model->addOperand(&type7);
+ auto time_major5 = model->addOperand(&type8);
+ auto merge_outputs5 = model->addOperand(&type8);
+ auto fw_output5 = model->addOperand(&type6);
+ auto bw_output4 = model->addOperand(&type6);
// Phase 2, operations
static int32_t activation5_init[] = {1};
model->setOperandValue(activation5, activation5_init, sizeof(int32_t) * 1);
@@ -1512,32 +1496,32 @@ inline bool is_ignored_6(int i) {
}
void CreateModel_relaxed_6(Model *model) {
- OperandType type13(Type::TENSOR_FLOAT32, {2, 16, 4});
- OperandType type14(Type::TENSOR_FLOAT32, {16, 4});
+ OperandType type12(Type::TENSOR_FLOAT32, {2, 16, 4});
+ OperandType type13(Type::TENSOR_FLOAT32, {16, 4});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type7(Type::TENSOR_FLOAT32, {2, 16, 16});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type6(Type::TENSOR_FLOAT32, {2, 16, 16});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input5 = model->addOperand(&type13);
- auto fw_weights5 = model->addOperand(&type14);
+ auto input5 = model->addOperand(&type12);
+ auto fw_weights5 = model->addOperand(&type13);
auto fw_recurrent_weights5 = model->addOperand(&type2);
auto fw_bias5 = model->addOperand(&type3);
auto fw_hidden_state5 = model->addOperand(&type4);
- auto bw_weights5 = model->addOperand(&type14);
+ auto bw_weights5 = model->addOperand(&type13);
auto bw_recurrent_weights5 = model->addOperand(&type2);
auto bw_bias5 = model->addOperand(&type3);
auto bw_hidden_state5 = model->addOperand(&type4);
- auto aux_input5 = model->addOperand(&type13);
- auto fw_aux_weights5 = model->addOperand(&type14);
- auto bw_aux_weights5 = model->addOperand(&type14);
- auto activation5 = model->addOperand(&type8);
- auto time_major5 = model->addOperand(&type9);
- auto merge_outputs5 = model->addOperand(&type9);
- auto fw_output5 = model->addOperand(&type7);
- auto bw_output4 = model->addOperand(&type7);
+ auto aux_input5 = model->addOperand(&type12);
+ auto fw_aux_weights5 = model->addOperand(&type13);
+ auto bw_aux_weights5 = model->addOperand(&type13);
+ auto activation5 = model->addOperand(&type7);
+ auto time_major5 = model->addOperand(&type8);
+ auto merge_outputs5 = model->addOperand(&type8);
+ auto fw_output5 = model->addOperand(&type6);
+ auto bw_output4 = model->addOperand(&type6);
// Phase 2, operations
static int32_t activation5_init[] = {1};
model->setOperandValue(activation5, activation5_init, sizeof(int32_t) * 1);
@@ -1561,32 +1545,32 @@ inline bool is_ignored_relaxed_6(int i) {
}
void CreateModel_float16_6(Model *model) {
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type19(Type::TENSOR_FLOAT16, {2, 16, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type27(Type::TENSOR_FLOAT16, {2, 16, 4});
- OperandType type28(Type::TENSOR_FLOAT16, {16, 4});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type17(Type::TENSOR_FLOAT16, {2, 16, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type26(Type::TENSOR_FLOAT16, {2, 16, 4});
+ OperandType type27(Type::TENSOR_FLOAT16, {16, 4});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input5 = model->addOperand(&type27);
- auto fw_weights5 = model->addOperand(&type28);
- auto fw_recurrent_weights5 = model->addOperand(&type20);
- auto fw_bias5 = model->addOperand(&type17);
- auto fw_hidden_state5 = model->addOperand(&type18);
- auto bw_weights5 = model->addOperand(&type28);
- auto bw_recurrent_weights5 = model->addOperand(&type20);
- auto bw_bias5 = model->addOperand(&type17);
- auto bw_hidden_state5 = model->addOperand(&type18);
- auto aux_input5 = model->addOperand(&type27);
- auto fw_aux_weights5 = model->addOperand(&type28);
- auto bw_aux_weights5 = model->addOperand(&type28);
- auto activation5 = model->addOperand(&type8);
- auto time_major5 = model->addOperand(&type9);
- auto merge_outputs5 = model->addOperand(&type9);
- auto fw_output5 = model->addOperand(&type19);
- auto bw_output4 = model->addOperand(&type19);
+ auto input5 = model->addOperand(&type26);
+ auto fw_weights5 = model->addOperand(&type27);
+ auto fw_recurrent_weights5 = model->addOperand(&type18);
+ auto fw_bias5 = model->addOperand(&type15);
+ auto fw_hidden_state5 = model->addOperand(&type16);
+ auto bw_weights5 = model->addOperand(&type27);
+ auto bw_recurrent_weights5 = model->addOperand(&type18);
+ auto bw_bias5 = model->addOperand(&type15);
+ auto bw_hidden_state5 = model->addOperand(&type16);
+ auto aux_input5 = model->addOperand(&type26);
+ auto fw_aux_weights5 = model->addOperand(&type27);
+ auto bw_aux_weights5 = model->addOperand(&type27);
+ auto activation5 = model->addOperand(&type7);
+ auto time_major5 = model->addOperand(&type8);
+ auto merge_outputs5 = model->addOperand(&type8);
+ auto fw_output5 = model->addOperand(&type17);
+ auto bw_output4 = model->addOperand(&type17);
// Phase 2, operations
static int32_t activation5_init[] = {1};
model->setOperandValue(activation5, activation5_init, sizeof(int32_t) * 1);
@@ -1608,32 +1592,32 @@ inline bool is_ignored_float16_6(int i) {
}
void CreateModel_dynamic_output_shape_6(Model *model) {
- OperandType type13(Type::TENSOR_FLOAT32, {2, 16, 4});
- OperandType type14(Type::TENSOR_FLOAT32, {16, 4});
+ OperandType type12(Type::TENSOR_FLOAT32, {2, 16, 4});
+ OperandType type13(Type::TENSOR_FLOAT32, {16, 4});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input5 = model->addOperand(&type13);
- auto fw_weights5 = model->addOperand(&type14);
+ auto input5 = model->addOperand(&type12);
+ auto fw_weights5 = model->addOperand(&type13);
auto fw_recurrent_weights5 = model->addOperand(&type2);
auto fw_bias5 = model->addOperand(&type3);
auto fw_hidden_state5 = model->addOperand(&type4);
- auto bw_weights5 = model->addOperand(&type14);
+ auto bw_weights5 = model->addOperand(&type13);
auto bw_recurrent_weights5 = model->addOperand(&type2);
auto bw_bias5 = model->addOperand(&type3);
auto bw_hidden_state5 = model->addOperand(&type4);
- auto aux_input5 = model->addOperand(&type13);
- auto fw_aux_weights5 = model->addOperand(&type14);
- auto bw_aux_weights5 = model->addOperand(&type14);
- auto activation5 = model->addOperand(&type8);
- auto time_major5 = model->addOperand(&type9);
- auto merge_outputs5 = model->addOperand(&type9);
- auto fw_output5 = model->addOperand(&type5);
- auto bw_output4 = model->addOperand(&type5);
+ auto aux_input5 = model->addOperand(&type12);
+ auto fw_aux_weights5 = model->addOperand(&type13);
+ auto bw_aux_weights5 = model->addOperand(&type13);
+ auto activation5 = model->addOperand(&type7);
+ auto time_major5 = model->addOperand(&type8);
+ auto merge_outputs5 = model->addOperand(&type8);
+ auto fw_output5 = model->addOperand(&type21);
+ auto bw_output4 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation5_init[] = {1};
model->setOperandValue(activation5, activation5_init, sizeof(int32_t) * 1);
@@ -1655,32 +1639,32 @@ inline bool is_ignored_dynamic_output_shape_6(int i) {
}
void CreateModel_dynamic_output_shape_relaxed_6(Model *model) {
- OperandType type13(Type::TENSOR_FLOAT32, {2, 16, 4});
- OperandType type14(Type::TENSOR_FLOAT32, {16, 4});
+ OperandType type12(Type::TENSOR_FLOAT32, {2, 16, 4});
+ OperandType type13(Type::TENSOR_FLOAT32, {16, 4});
OperandType type2(Type::TENSOR_FLOAT32, {16, 16});
+ OperandType type21(Type::TENSOR_FLOAT32, {0, 0, 0});
OperandType type3(Type::TENSOR_FLOAT32, {16});
OperandType type4(Type::TENSOR_FLOAT32, {2, 16});
- OperandType type5(Type::TENSOR_FLOAT32, {0, 0, 0});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input5 = model->addOperand(&type13);
- auto fw_weights5 = model->addOperand(&type14);
+ auto input5 = model->addOperand(&type12);
+ auto fw_weights5 = model->addOperand(&type13);
auto fw_recurrent_weights5 = model->addOperand(&type2);
auto fw_bias5 = model->addOperand(&type3);
auto fw_hidden_state5 = model->addOperand(&type4);
- auto bw_weights5 = model->addOperand(&type14);
+ auto bw_weights5 = model->addOperand(&type13);
auto bw_recurrent_weights5 = model->addOperand(&type2);
auto bw_bias5 = model->addOperand(&type3);
auto bw_hidden_state5 = model->addOperand(&type4);
- auto aux_input5 = model->addOperand(&type13);
- auto fw_aux_weights5 = model->addOperand(&type14);
- auto bw_aux_weights5 = model->addOperand(&type14);
- auto activation5 = model->addOperand(&type8);
- auto time_major5 = model->addOperand(&type9);
- auto merge_outputs5 = model->addOperand(&type9);
- auto fw_output5 = model->addOperand(&type5);
- auto bw_output4 = model->addOperand(&type5);
+ auto aux_input5 = model->addOperand(&type12);
+ auto fw_aux_weights5 = model->addOperand(&type13);
+ auto bw_aux_weights5 = model->addOperand(&type13);
+ auto activation5 = model->addOperand(&type7);
+ auto time_major5 = model->addOperand(&type8);
+ auto merge_outputs5 = model->addOperand(&type8);
+ auto fw_output5 = model->addOperand(&type21);
+ auto bw_output4 = model->addOperand(&type21);
// Phase 2, operations
static int32_t activation5_init[] = {1};
model->setOperandValue(activation5, activation5_init, sizeof(int32_t) * 1);
@@ -1704,32 +1688,32 @@ inline bool is_ignored_dynamic_output_shape_relaxed_6(int i) {
}
void CreateModel_dynamic_output_shape_float16_6(Model *model) {
- OperandType type17(Type::TENSOR_FLOAT16, {16});
- OperandType type18(Type::TENSOR_FLOAT16, {2, 16});
- OperandType type20(Type::TENSOR_FLOAT16, {16, 16});
- OperandType type23(Type::TENSOR_FLOAT16, {0, 0, 0});
- OperandType type27(Type::TENSOR_FLOAT16, {2, 16, 4});
- OperandType type28(Type::TENSOR_FLOAT16, {16, 4});
- OperandType type8(Type::INT32, {});
- OperandType type9(Type::BOOL, {});
+ OperandType type15(Type::TENSOR_FLOAT16, {16});
+ OperandType type16(Type::TENSOR_FLOAT16, {2, 16});
+ OperandType type18(Type::TENSOR_FLOAT16, {16, 16});
+ OperandType type22(Type::TENSOR_FLOAT16, {0, 0, 0});
+ OperandType type26(Type::TENSOR_FLOAT16, {2, 16, 4});
+ OperandType type27(Type::TENSOR_FLOAT16, {16, 4});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::BOOL, {});
// Phase 1, operands
- auto input5 = model->addOperand(&type27);
- auto fw_weights5 = model->addOperand(&type28);
- auto fw_recurrent_weights5 = model->addOperand(&type20);
- auto fw_bias5 = model->addOperand(&type17);
- auto fw_hidden_state5 = model->addOperand(&type18);
- auto bw_weights5 = model->addOperand(&type28);
- auto bw_recurrent_weights5 = model->addOperand(&type20);
- auto bw_bias5 = model->addOperand(&type17);
- auto bw_hidden_state5 = model->addOperand(&type18);
- auto aux_input5 = model->addOperand(&type27);
- auto fw_aux_weights5 = model->addOperand(&type28);
- auto bw_aux_weights5 = model->addOperand(&type28);
- auto activation5 = model->addOperand(&type8);
- auto time_major5 = model->addOperand(&type9);
- auto merge_outputs5 = model->addOperand(&type9);
- auto fw_output5 = model->addOperand(&type23);
- auto bw_output4 = model->addOperand(&type23);
+ auto input5 = model->addOperand(&type26);
+ auto fw_weights5 = model->addOperand(&type27);
+ auto fw_recurrent_weights5 = model->addOperand(&type18);
+ auto fw_bias5 = model->addOperand(&type15);
+ auto fw_hidden_state5 = model->addOperand(&type16);
+ auto bw_weights5 = model->addOperand(&type27);
+ auto bw_recurrent_weights5 = model->addOperand(&type18);
+ auto bw_bias5 = model->addOperand(&type15);
+ auto bw_hidden_state5 = model->addOperand(&type16);
+ auto aux_input5 = model->addOperand(&type26);
+ auto fw_aux_weights5 = model->addOperand(&type27);
+ auto bw_aux_weights5 = model->addOperand(&type27);
+ auto activation5 = model->addOperand(&type7);
+ auto time_major5 = model->addOperand(&type8);
+ auto merge_outputs5 = model->addOperand(&type8);
+ auto fw_output5 = model->addOperand(&type22);
+ auto bw_output4 = model->addOperand(&type22);
// Phase 2, operations
static int32_t activation5_init[] = {1};
model->setOperandValue(activation5, activation5_init, sizeof(int32_t) * 1);
diff --git a/nn/runtime/test/generated/vts_models/bidirectional_sequence_rnn.model.cpp b/nn/runtime/test/generated/vts_models/bidirectional_sequence_rnn.model.cpp
index f62c6543a..c7eb8667f 100644
--- a/nn/runtime/test/generated/vts_models/bidirectional_sequence_rnn.model.cpp
+++ b/nn/runtime/test/generated/vts_models/bidirectional_sequence_rnn.model.cpp
@@ -86,7 +86,7 @@ Model createTestModel() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -95,7 +95,7 @@ Model createTestModel() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -104,7 +104,7 @@ Model createTestModel() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -274,7 +274,7 @@ Model createTestModel_relaxed() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -283,7 +283,7 @@ Model createTestModel_relaxed() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -292,7 +292,7 @@ Model createTestModel_relaxed() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -463,7 +463,7 @@ Model createTestModel_float16() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -472,7 +472,7 @@ Model createTestModel_float16() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -481,7 +481,7 @@ Model createTestModel_float16() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -651,7 +651,7 @@ Model createTestModel_dynamic_output_shape() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -660,7 +660,7 @@ Model createTestModel_dynamic_output_shape() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -669,7 +669,7 @@ Model createTestModel_dynamic_output_shape() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -839,7 +839,7 @@ Model createTestModel_dynamic_output_shape_relaxed() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -848,7 +848,7 @@ Model createTestModel_dynamic_output_shape_relaxed() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -857,7 +857,7 @@ Model createTestModel_dynamic_output_shape_relaxed() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1028,7 +1028,7 @@ Model createTestModel_dynamic_output_shape_float16() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1037,7 +1037,7 @@ Model createTestModel_dynamic_output_shape_float16() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1046,7 +1046,7 @@ Model createTestModel_dynamic_output_shape_float16() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1216,7 +1216,7 @@ Model createTestModel_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1225,7 +1225,7 @@ Model createTestModel_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1234,7 +1234,7 @@ Model createTestModel_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1404,7 +1404,7 @@ Model createTestModel_relaxed_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1413,7 +1413,7 @@ Model createTestModel_relaxed_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1422,7 +1422,7 @@ Model createTestModel_relaxed_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1593,7 +1593,7 @@ Model createTestModel_float16_2() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1602,7 +1602,7 @@ Model createTestModel_float16_2() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1611,7 +1611,7 @@ Model createTestModel_float16_2() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1781,7 +1781,7 @@ Model createTestModel_dynamic_output_shape_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1790,7 +1790,7 @@ Model createTestModel_dynamic_output_shape_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1799,7 +1799,7 @@ Model createTestModel_dynamic_output_shape_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1969,7 +1969,7 @@ Model createTestModel_dynamic_output_shape_relaxed_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1978,7 +1978,7 @@ Model createTestModel_dynamic_output_shape_relaxed_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -1987,7 +1987,7 @@ Model createTestModel_dynamic_output_shape_relaxed_2() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2158,7 +2158,7 @@ Model createTestModel_dynamic_output_shape_float16_2() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2167,7 +2167,7 @@ Model createTestModel_dynamic_output_shape_float16_2() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2176,7 +2176,7 @@ Model createTestModel_dynamic_output_shape_float16_2() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2346,7 +2346,7 @@ Model createTestModel_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2355,7 +2355,7 @@ Model createTestModel_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2364,7 +2364,7 @@ Model createTestModel_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2525,7 +2525,7 @@ Model createTestModel_relaxed_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2534,7 +2534,7 @@ Model createTestModel_relaxed_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2543,7 +2543,7 @@ Model createTestModel_relaxed_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2705,7 +2705,7 @@ Model createTestModel_float16_3() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2714,7 +2714,7 @@ Model createTestModel_float16_3() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2723,7 +2723,7 @@ Model createTestModel_float16_3() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2884,7 +2884,7 @@ Model createTestModel_dynamic_output_shape_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2893,7 +2893,7 @@ Model createTestModel_dynamic_output_shape_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -2902,7 +2902,7 @@ Model createTestModel_dynamic_output_shape_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3063,7 +3063,7 @@ Model createTestModel_dynamic_output_shape_relaxed_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3072,7 +3072,7 @@ Model createTestModel_dynamic_output_shape_relaxed_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3081,7 +3081,7 @@ Model createTestModel_dynamic_output_shape_relaxed_3() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3243,7 +3243,7 @@ Model createTestModel_dynamic_output_shape_float16_3() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3252,7 +3252,7 @@ Model createTestModel_dynamic_output_shape_float16_3() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3261,7 +3261,7 @@ Model createTestModel_dynamic_output_shape_float16_3() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3422,7 +3422,7 @@ Model createTestModel_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3431,7 +3431,7 @@ Model createTestModel_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3440,7 +3440,7 @@ Model createTestModel_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3610,7 +3610,7 @@ Model createTestModel_relaxed_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3619,7 +3619,7 @@ Model createTestModel_relaxed_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3628,7 +3628,7 @@ Model createTestModel_relaxed_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3799,7 +3799,7 @@ Model createTestModel_float16_4() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3808,7 +3808,7 @@ Model createTestModel_float16_4() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3817,7 +3817,7 @@ Model createTestModel_float16_4() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3987,7 +3987,7 @@ Model createTestModel_dynamic_output_shape_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -3996,7 +3996,7 @@ Model createTestModel_dynamic_output_shape_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -4005,7 +4005,7 @@ Model createTestModel_dynamic_output_shape_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -4175,7 +4175,7 @@ Model createTestModel_dynamic_output_shape_relaxed_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -4184,7 +4184,7 @@ Model createTestModel_dynamic_output_shape_relaxed_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -4193,7 +4193,7 @@ Model createTestModel_dynamic_output_shape_relaxed_4() {
},
{
.type = OperandType::TENSOR_FLOAT32,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -4364,7 +4364,7 @@ Model createTestModel_dynamic_output_shape_float16_4() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -4373,7 +4373,7 @@ Model createTestModel_dynamic_output_shape_float16_4() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
@@ -4382,7 +4382,7 @@ Model createTestModel_dynamic_output_shape_float16_4() {
},
{
.type = OperandType::TENSOR_FLOAT16,
- .dimensions = {0, 0},
+ .dimensions = {0},
.numberOfConsumers = 1,
.scale = 0.0f,
.zeroPoint = 0,
diff --git a/nn/runtime/test/specs/V1_2/bidirectional_sequence_rnn.mod.py b/nn/runtime/test/specs/V1_2/bidirectional_sequence_rnn.mod.py
index d6bf2d3e2..b0a250170 100644
--- a/nn/runtime/test/specs/V1_2/bidirectional_sequence_rnn.mod.py
+++ b/nn/runtime/test/specs/V1_2/bidirectional_sequence_rnn.mod.py
@@ -224,12 +224,11 @@ test(
bw_bias=Input("bw_bias", "TENSOR_FLOAT32", "{{ {} }}".format(bw_num_units)),
bw_hidden_state=Input("bw_hidden_state", "TENSOR_FLOAT32",
"{{ {}, {} }}".format(num_batches, bw_num_units)),
- aux_input=Input("aux_input", "TENSOR_FLOAT32", "{{ {}, {}, {} }}".format(
- 0, 0, 0)),
+ aux_input=Input("aux_input", "TENSOR_FLOAT32", "{0}"),
fw_aux_weights=Input("fw_aux_weights", "TENSOR_FLOAT32",
- "{{ {}, {} }}".format(0, 0)),
+ "{0}"),
bw_aux_weights=Input("bw_aux_weights", "TENSOR_FLOAT32",
- "{{ {}, {} }}".format(0, 0)),
+ "{0}"),
fw_output=Output(
"fw_output", "TENSOR_FLOAT32", "{{ {}, {}, {} }}".format(
num_batches, max_time, fw_num_units)),
@@ -275,12 +274,11 @@ test(
bw_bias=Input("bw_bias", "TENSOR_FLOAT32", "{{ {} }}".format(bw_num_units)),
bw_hidden_state=Input("bw_hidden_state", "TENSOR_FLOAT32",
"{{ {}, {} }}".format(num_batches, bw_num_units)),
- aux_input=Input("aux_input", "TENSOR_FLOAT32", "{{ {}, {}, {} }}".format(
- 0, 0, 0)),
+ aux_input=Input("aux_input", "TENSOR_FLOAT32", "{0}"),
fw_aux_weights=Input("fw_aux_weights", "TENSOR_FLOAT32",
- "{{ {}, {} }}".format(0, 0)),
+ "{0}"),
bw_aux_weights=Input("bw_aux_weights", "TENSOR_FLOAT32",
- "{{ {}, {} }}".format(0, 0)),
+ "{0}"),
fw_output=Output(
"fw_output", "TENSOR_FLOAT32", "{{ {}, {}, {} }}".format(
max_time, num_batches, fw_num_units)),
@@ -329,12 +327,11 @@ test(
bw_bias=Input("bw_bias", "TENSOR_FLOAT32", "{{ {} }}".format(bw_num_units)),
bw_hidden_state=Input("bw_hidden_state", "TENSOR_FLOAT32",
"{{ {}, {} }}".format(num_batches, bw_num_units)),
- aux_input=Input("aux_input", "TENSOR_FLOAT32", "{{ {}, {}, {} }}".format(
- 0, 0, 0)),
+ aux_input=Input("aux_input", "TENSOR_FLOAT32", "{0}"),
fw_aux_weights=Input("fw_aux_weights", "TENSOR_FLOAT32",
- "{{ {}, {} }}".format(0, 0)),
+ "{0}"),
bw_aux_weights=Input("bw_aux_weights", "TENSOR_FLOAT32",
- "{{ {}, {} }}".format(0, 0)),
+ "{0}"),
fw_output=Output(
"fw_output", "TENSOR_FLOAT32", "{{ {}, {}, {} }}".format(
max_time, num_batches, fw_num_units + bw_num_units)),
@@ -386,12 +383,9 @@ test(
bw_bias=Input("bw_bias", "TENSOR_FLOAT32", "{{ {} }}".format(bw_num_units)),
bw_hidden_state=Input("bw_hidden_state", "TENSOR_FLOAT32",
"{{ {}, {} }}".format(num_batches, bw_num_units)),
- aux_input=Input("aux_input", "TENSOR_FLOAT32", "{{ {}, {}, {} }}".format(
- 0, 0, 0)),
- fw_aux_weights=Input("fw_aux_weights", "TENSOR_FLOAT32",
- "{{ {}, {} }}".format(0, 0)),
- bw_aux_weights=Input("bw_aux_weights", "TENSOR_FLOAT32",
- "{{ {}, {} }}".format(0, 0)),
+ aux_input=Input("aux_input", "TENSOR_FLOAT32", "{0}"),
+ fw_aux_weights=Input("fw_aux_weights", "TENSOR_FLOAT32", "{0}"),
+ bw_aux_weights=Input("bw_aux_weights", "TENSOR_FLOAT32", "{0}"),
fw_output=Output(
"fw_output", "TENSOR_FLOAT32", "{{ {}, {}, {} }}".format(
num_batches, max_time, fw_num_units)),